status
stringclasses
1 value
repo_name
stringclasses
31 values
repo_url
stringclasses
31 values
issue_id
int64
1
104k
title
stringlengths
4
369
body
stringlengths
0
254k
issue_url
stringlengths
37
56
pull_url
stringlengths
37
54
before_fix_sha
stringlengths
40
40
after_fix_sha
stringlengths
40
40
report_datetime
unknown
language
stringclasses
5 values
commit_datetime
unknown
updated_file
stringlengths
4
188
file_content
stringlengths
0
5.12M
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,511
[Feature][JsonSplit-api]schedule update interface
from #5498 remove the request parameter workerGroupId,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5511
https://github.com/apache/dolphinscheduler/pull/5761
d382a7ba8c454b41944958c6e42692843a765234
cfa22d7c89bcd8e35b8a286b39b67b9b36b3b4dc
"2021-05-18T13:58:16Z"
java
"2021-07-07T10:15:19Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.controller; import static org.mockito.ArgumentMatchers.isA; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import java.util.HashMap; import java.util.Map; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.SchedulerService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.User; import org.junit.Assert; import org.junit.Test; import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.boot.test.mock.mockito.MockBean; import 
org.springframework.http.MediaType; import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; /** * scheduler controller test */ public class SchedulerControllerTest extends AbstractControllerTest { private static Logger logger = LoggerFactory.getLogger(SchedulerControllerTest.class); @MockBean private SchedulerService schedulerService; @Test public void testCreateSchedule() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("processDefinitionCode","40"); paramsMap.add("schedule","{'startTime':'2019-12-16 00:00:00','endTime':'2019-12-17 00:00:00','crontab':'0 0 6 * * ? *'}"); paramsMap.add("warningType",String.valueOf(WarningType.NONE)); paramsMap.add("warningGroupId","1"); paramsMap.add("failureStrategy",String.valueOf(FailureStrategy.CONTINUE)); paramsMap.add("receivers",""); paramsMap.add("receiversCc",""); paramsMap.add("workerGroupId","1"); paramsMap.add("processInstancePriority",String.valueOf(Priority.HIGH)); Map<String, Object> serviceResult = new HashMap<>(); putMsg(serviceResult, Status.SUCCESS); serviceResult.put(Constants.DATA_LIST, 1); Mockito.when(schedulerService.insertSchedule(isA(User.class), isA(Long.class), isA(Long.class), isA(String.class), isA(WarningType.class), isA(int.class), isA(FailureStrategy.class), isA(Priority.class), isA(String.class))).thenReturn(serviceResult); MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/schedule/create",123) .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isCreated()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testUpdateSchedule() 
throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("id","37"); paramsMap.add("schedule","{'startTime':'2019-12-16 00:00:00','endTime':'2019-12-17 00:00:00','crontab':'0 0 7 * * ? *'}"); paramsMap.add("warningType",String.valueOf(WarningType.NONE)); paramsMap.add("warningGroupId","1"); paramsMap.add("failureStrategy",String.valueOf(FailureStrategy.CONTINUE)); paramsMap.add("receivers",""); paramsMap.add("receiversCc",""); paramsMap.add("workerGroupId","1"); paramsMap.add("processInstancePriority",String.valueOf(Priority.HIGH)); MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/schedule/update","cxc_1113") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testOnline() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("id","37"); MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/schedule/online","cxc_1113") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testOffline() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("id","28"); MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/schedule/offline","cxc_1113") .header(SESSION_ID, sessionId) 
.params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testQueryScheduleListPaging() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("processDefinitionId","40"); paramsMap.add("searchVal","test"); paramsMap.add("pageNo","1"); paramsMap.add("pageSize","30"); MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/schedule/list-paging","cxc_1113") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testQueryScheduleList() throws Exception { MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/schedule/list","cxc_1113") .header(SESSION_ID, sessionId)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testPreviewSchedule() throws Exception { MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/schedule/preview","cxc_1113") .header(SESSION_ID, sessionId) .param("schedule","{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? 
*'}")) .andExpect(status().isCreated()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testDeleteScheduleById() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("scheduleId","37"); MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/schedule/delete","cxc_1113") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,511
[Feature][JsonSplit-api]schedule update interface
from #5498 remove the request parameter workerGroupId,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5511
https://github.com/apache/dolphinscheduler/pull/5761
d382a7ba8c454b41944958c6e42692843a765234
cfa22d7c89bcd8e35b8a286b39b67b9b36b3b4dc
"2021-05-18T13:58:16Z"
java
"2021-07-07T10:15:19Z"
dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import _ from 'lodash' import io from '@/module/io' import { tasksState } from '@/conf/home/pages/dag/_source/config' // delete 'definitionList' from tasks const deleteDefinitionList = (tasks) => { const newTasks = [] tasks.forEach(item => { const newItem = Object.assign({}, item) if (newItem.dependence && newItem.dependence.dependTaskList) { newItem.dependence.dependTaskList.forEach(dependTaskItem => { if (dependTaskItem.dependItemList) { dependTaskItem.dependItemList.forEach(dependItem => { Reflect.deleteProperty(dependItem, 'definitionList') }) } }) } newTasks.push(newItem) }) return newTasks } export default { /** * Task status acquisition */ getTaskState ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/task-list-by-process-id`, { processInstanceId: payload }, res => { const arr = _.map(res.data.taskList, v => { return _.cloneDeep(_.assign(tasksState[v.state], { name: v.name, stateId: v.id, dependentResult: v.dependentResult })) }) resolve({ list: arr, processInstanceState: res.data.processInstanceState, taskList: res.data.taskList }) }).catch(e => { reject(e) }) }) }, /** * Update process definition status */ editProcessState ({ 
state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/process/release`, { processId: payload.processId, releaseState: payload.releaseState }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * get process definition versions pagination info */ getProcessDefinitionVersionsPage ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/versions`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * switch process definition version */ switchProcessDefinitionVersion ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/version/switch`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * delete process definition version */ deleteProcessDefinitionVersion ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/version/delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Update process instance status */ editExecutorsState ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/executors/execute`, { processInstanceId: payload.processInstanceId, executeType: payload.executeType }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Verify that the DGA map name exists */ verifDAGName ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/verify-name`, { name: payload }, res => { state.name = payload resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get process definition DAG diagram details */ getProcessDetails ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/select-by-id`, { processId: payload }, res => { // process definition code state.code = res.data.code // version state.version = res.data.version // name 
state.name = res.data.name // description state.description = res.data.description // connects state.connects = JSON.parse(res.data.connects) // locations state.locations = JSON.parse(res.data.locations) // Process definition const processDefinitionJson = JSON.parse(res.data.processDefinitionJson) // tasks info state.tasks = processDefinitionJson.tasks // tasks cache state.cacheTasks = {} processDefinitionJson.tasks.forEach(v => { state.cacheTasks[v.id] = v }) // global params state.globalParams = processDefinitionJson.globalParams // timeout state.timeout = processDefinitionJson.timeout state.tenantId = processDefinitionJson.tenantId resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get process definition DAG diagram details */ copyProcess ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/process/copy`, { processDefinitionIds: payload.processDefinitionIds, targetProjectId: payload.targetProjectId }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get process definition DAG diagram details */ moveProcess ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/process/move`, { processDefinitionIds: payload.processDefinitionIds, targetProjectId: payload.targetProjectId }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get all the items created by the logged in user */ getAllItems ({ state }, payload) { return new Promise((resolve, reject) => { io.get('projects/created-and-authorized-project', {}, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get the process instance DAG diagram details */ getInstancedetail ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/select-by-id`, { processInstanceId: payload }, res => { // code state.code = res.data.processDefinitionCode // version state.version = res.data.processDefinitionVersion // name state.name = 
res.data.name // desc state.description = res.data.description // connects state.connects = JSON.parse(res.data.connects) // locations state.locations = JSON.parse(res.data.locations) // process instance const processInstanceJson = JSON.parse(res.data.processInstanceJson) // tasks info state.tasks = processInstanceJson.tasks // tasks cache state.cacheTasks = {} processInstanceJson.tasks.forEach(v => { state.cacheTasks[v.id] = v }) // global params state.globalParams = processInstanceJson.globalParams // timeout state.timeout = processInstanceJson.timeout state.tenantId = processInstanceJson.tenantId // startup parameters state.startup = _.assign(state.startup, _.pick(res.data, ['commandType', 'failureStrategy', 'processInstancePriority', 'workerGroup', 'warningType', 'warningGroupId', 'receivers', 'receiversCc'])) state.startup.commandParam = JSON.parse(res.data.commandParam) resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Create process definition */ saveDAGchart ({ state }, payload) { return new Promise((resolve, reject) => { const data = { globalParams: state.globalParams, tasks: deleteDefinitionList(state.tasks), tenantId: state.tenantId, timeout: state.timeout } io.post(`projects/${state.projectName}/process/save`, { processDefinitionJson: JSON.stringify(data), name: _.trim(state.name), description: _.trim(state.description), locations: JSON.stringify(state.locations), connects: JSON.stringify(state.connects) }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Process definition update */ updateDefinition ({ state }, payload) { return new Promise((resolve, reject) => { const data = { globalParams: state.globalParams, tasks: deleteDefinitionList(state.tasks), tenantId: state.tenantId, timeout: state.timeout } io.post(`projects/${state.projectName}/process/update`, { processDefinitionJson: JSON.stringify(data), locations: JSON.stringify(state.locations), connects: JSON.stringify(state.connects), name: _.trim(state.name), description: 
_.trim(state.description), id: payload, releaseState: state.releaseState }, res => { resolve(res) state.isEditDag = false }).catch(e => { reject(e) }) }) }, /** * Process instance update */ updateInstance ({ state }, payload) { return new Promise((resolve, reject) => { const data = { globalParams: state.globalParams, tasks: state.tasks, tenantId: state.tenantId, timeout: state.timeout } io.post(`projects/${state.projectName}/instance/update`, { processInstanceJson: JSON.stringify(data), locations: JSON.stringify(state.locations), connects: JSON.stringify(state.connects), processInstanceId: payload, syncDefine: state.syncDefine }, res => { resolve(res) state.isEditDag = false }).catch(e => { reject(e) }) }) }, /** * Get a list of process definitions (sub-workflow usage is not paged) */ getProcessList ({ state }, payload) { return new Promise((resolve, reject) => { if (state.processListS.length) { resolve() return } io.get(`projects/${state.projectName}/process/list`, payload, res => { state.processListS = res.data resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get a list of process definitions (list page usage with pagination) */ getProcessListP ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/list-paging`, payload, res => { resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get a list of project */ getProjectList ({ state }, payload) { return new Promise((resolve, reject) => { if (state.projectListS.length) { resolve() return } io.get('projects/query-project-list', payload, res => { state.projectListS = res.data resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get a list of process definitions by project id */ getProcessByProjectId ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/queryProcessDefinitionAllByProjectId`, payload, res => { resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * get 
datasource */ getDatasourceList ({ state }, payload) { return new Promise((resolve, reject) => { io.get('datasources/list', { type: payload }, res => { resolve(res) }).catch(res => { reject(res) }) }) }, /** * get resources */ getResourcesList ({ state }) { return new Promise((resolve, reject) => { if (state.resourcesListS.length) { resolve() return } io.get('resources/list', { type: 'FILE' }, res => { state.resourcesListS = res.data resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * get jar */ getResourcesListJar ({ state }) { return new Promise((resolve, reject) => { if (state.resourcesListJar.length) { resolve() return } io.get('resources/list/jar', { type: 'FILE' }, res => { state.resourcesListJar = res.data resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get process instance */ getProcessInstance ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/list-paging`, payload, res => { state.instanceListS = res.data.totalList resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get alarm list */ getNotifyGroupList ({ state }, payload) { return new Promise((resolve, reject) => { io.get('alert-group/list', res => { state.notifyGroupListS = _.map(res.data, v => { return { id: v.id, code: v.groupName, disabled: false } }) resolve(_.cloneDeep(state.notifyGroupListS)) }).catch(res => { reject(res) }) }) }, /** * Process definition startup interface */ processStart ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/executors/start-process-instance`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * View log */ getLog ({ state }, payload) { return new Promise((resolve, reject) => { io.get('log/detail', payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get the process instance id according to the process definition id * @param taskId */ getSubProcessId ({ state }, payload) { return new 
Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/select-sub-process`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Called before the process definition starts */ getStartCheck ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/executors/start-check`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Create timing */ createSchedule ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectCode}/schedule/create`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Preview timing */ previewSchedule ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/schedule/preview`, payload, res => { resolve(res.data) // alert(res.data) }).catch(e => { reject(e) }) }) }, /** * Timing list paging */ getScheduleList ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/schedule/list-paging`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Timing online */ scheduleOffline ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/schedule/offline`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Timed offline */ scheduleOnline ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/schedule/online`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Edit timing */ updateSchedule ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/schedule/update`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Delete process instance */ deleteInstance ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/delete`, payload, 
res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Batch delete process instance */ batchDeleteInstance ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/batch-delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Delete definition */ deleteDefinition ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Batch delete definition */ batchDeleteDefinition ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/batch-delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * export definition */ exportDefinition ({ state }, payload) { const downloadBlob = (data, fileNameS = 'json') => { if (!data) { return } const blob = new Blob([data]) const fileName = `${fileNameS}.json` if ('download' in document.createElement('a')) { // 不是IE浏览器 const url = window.URL.createObjectURL(blob) const link = document.createElement('a') link.style.display = 'none' link.href = url link.setAttribute('download', fileName) document.body.appendChild(link) link.click() document.body.removeChild(link) // 下载完成移除元素 window.URL.revokeObjectURL(url) // 释放掉blob对象 } else { // IE 10+ window.navigator.msSaveBlob(blob, fileName) } } io.get(`projects/${state.projectName}/process/export`, { processDefinitionIds: payload.processDefinitionIds }, res => { downloadBlob(res, payload.fileName) }, e => { }, { responseType: 'blob' }) }, /** * Process instance get variable */ getViewvariables ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/view-variables`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get udfs function based on data source */ getUdfList ({ state }, payload) { return new Promise((resolve, reject) => { 
io.get('resources/udf-func/list', payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Query task instance list */ getTaskInstanceList ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/task-instance/list-paging`, payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * Force fail/kill/need_fault_tolerance task success */ forceTaskSuccess ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/task-instance/force-success`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Query task record list */ getTaskRecordList ({ state }, payload) { return new Promise((resolve, reject) => { io.get('projects/task-record/list-paging', payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * Query history task record list */ getHistoryTaskRecordList ({ state }, payload) { return new Promise((resolve, reject) => { io.get('projects/task-record/history-list-paging', payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * tree chart */ getViewTree ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/view-tree`, payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * gantt chart */ getViewGantt ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/view-gantt`, payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * Query task node list */ getProcessTasksList ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/gen-task-list`, payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, getTaskListDefIdAll ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/get-task-list`, payload, res => { resolve(res.data) 
}).catch(e => { reject(e) }) }) }, /** * remove timing */ deleteTiming ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/schedule/delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, getResourceId ({ state }, payload) { return new Promise((resolve, reject) => { io.get('resources/queryResource', payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,763
[Feature][JsonSplit-api]schedule online/offline interface
from #5498 Change the request parameter projectName to projectCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5763
https://github.com/apache/dolphinscheduler/pull/5764
cfa22d7c89bcd8e35b8a286b39b67b9b36b3b4dc
e4f427a8d8bf99754698e054845291a5223c2ea6
"2021-07-07T11:37:00Z"
java
"2021-07-08T05:59:40Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.controller; import static org.apache.dolphinscheduler.api.enums.Status.CREATE_SCHEDULE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.OFFLINE_SCHEDULE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.PREVIEW_SCHEDULE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.PUBLISH_SCHEDULE_ONLINE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_SCHEDULE_LIST_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_SCHEDULE_LIST_PAGING_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_SCHEDULE_ERROR; import static org.apache.dolphinscheduler.common.Constants.SESSION_USER; import org.apache.dolphinscheduler.api.aspect.AccessLogAnnotation; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.SchedulerService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import 
org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.entity.User; import java.util.Map; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestAttribute; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; import io.swagger.annotations.Api; import io.swagger.annotations.ApiImplicitParam; import io.swagger.annotations.ApiImplicitParams; import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiParam; import springfox.documentation.annotations.ApiIgnore; /** * scheduler controller */ @Api(tags = "SCHEDULER_TAG") @RestController @RequestMapping("/projects/{projectCode}/schedule") public class SchedulerController extends BaseController { public static final String DEFAULT_WARNING_TYPE = "NONE"; public static final String DEFAULT_NOTIFY_GROUP_ID = "1"; public static final String DEFAULT_FAILURE_POLICY = "CONTINUE"; public static final String DEFAULT_PROCESS_INSTANCE_PRIORITY = "MEDIUM"; @Autowired private SchedulerService schedulerService; /** * create schedule * * @param loginUser login user * @param projectCode project code * @param processDefinitionCode process definition code * @param schedule scheduler * @param warningType warning type * @param warningGroupId warning group id * @param 
failureStrategy failure strategy * @param processInstancePriority process instance priority * @param workerGroup worker group * @return create result code */ @ApiOperation(value = "createSchedule", notes = "CREATE_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionCode", value = "PROCESS_DEFINITION_CODE", required = true, dataType = "Long", example = "100"), @ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','timezoneId':'America/Phoenix','crontab':'0 0 3/6 * * ? *'}"), @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type = "WarningType"), @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", dataType = "Int", example = "100"), @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type = "FailureStrategy"), @ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int", example = "100"), @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type = "Priority"), }) @PostMapping("/create") @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_SCHEDULE_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result createSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam(value = "processDefinitionCode") long processDefinitionCode, @RequestParam(value = "schedule") String schedule, @RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType, @RequestParam(value = "warningGroupId", required = false, defaultValue = DEFAULT_NOTIFY_GROUP_ID) int warningGroupId, @RequestParam(value = "failureStrategy", required = false, defaultValue = DEFAULT_FAILURE_POLICY) FailureStrategy failureStrategy, @RequestParam(value = 
"workerGroup", required = false, defaultValue = "default") String workerGroup, @RequestParam(value = "processInstancePriority", required = false, defaultValue = DEFAULT_PROCESS_INSTANCE_PRIORITY) Priority processInstancePriority) { Map<String, Object> result = schedulerService.insertSchedule(loginUser, projectCode, processDefinitionCode, schedule, warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup); return returnDataList(result); } /** * updateProcessInstance schedule * * @param loginUser login user * @param projectCode project code * @param id scheduler id * @param schedule scheduler * @param warningType warning type * @param warningGroupId warning group id * @param failureStrategy failure strategy * @param workerGroup worker group * @param processInstancePriority process instance priority * @return update result code */ @ApiOperation(value = "updateSchedule", notes = "UPDATE_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? 
*'}"), @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type = "WarningType"), @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", dataType = "Int", example = "100"), @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type = "FailureStrategy"), @ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int", example = "100"), @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type = "Priority"), }) @PostMapping("/update") @ApiException(UPDATE_SCHEDULE_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result updateSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam(value = "id") Integer id, @RequestParam(value = "schedule") String schedule, @RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType, @RequestParam(value = "warningGroupId", required = false) int warningGroupId, @RequestParam(value = "failureStrategy", required = false, defaultValue = "END") FailureStrategy failureStrategy, @RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup, @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) { Map<String, Object> result = schedulerService.updateSchedule(loginUser, projectCode, id, schedule, warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup); return returnDataList(result); } /** * publish schedule setScheduleState * * @param loginUser login user * @param projectName project name * @param id scheduler id * @return publish result code */ @ApiOperation(value = "online", notes = "ONLINE_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", 
example = "100") }) @PostMapping("/online") @ApiException(PUBLISH_SCHEDULE_ONLINE_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result online(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam("id") Integer id) { Map<String, Object> result = schedulerService.setScheduleState(loginUser, projectName, id, ReleaseState.ONLINE); return returnDataList(result); } /** * offline schedule * * @param loginUser login user * @param projectName project name * @param id schedule id * @return operation result code */ @ApiOperation(value = "offline", notes = "OFFLINE_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100") }) @PostMapping("/offline") @ApiException(OFFLINE_SCHEDULE_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result offline(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam("id") Integer id) { Map<String, Object> result = schedulerService.setScheduleState(loginUser, projectName, id, ReleaseState.OFFLINE); return returnDataList(result); } /** * query schedule list paging * * @param loginUser login user * @param projectName project name * @param processDefinitionId process definition id * @param pageNo page number * @param pageSize page size * @param searchVal search value * @return schedule list page */ @ApiOperation(value = "queryScheduleListPaging", notes = "QUERY_SCHEDULE_LIST_PAGING_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"), @ApiImplicitParam(name = "pageNo", value = 
"PAGE_NO", dataType = "Int", example = "100"), @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "100") }) @GetMapping("/list-paging") @ApiException(QUERY_SCHEDULE_LIST_PAGING_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result queryScheduleListPaging(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam Integer processDefinitionId, @RequestParam(value = "searchVal", required = false) String searchVal, @RequestParam("pageNo") Integer pageNo, @RequestParam("pageSize") Integer pageSize) { Map<String, Object> result = checkPageParams(pageNo, pageSize); if (result.get(Constants.STATUS) != Status.SUCCESS) { return returnDataListPaging(result); } searchVal = ParameterUtils.handleEscapes(searchVal); result = schedulerService.querySchedule(loginUser, projectName, processDefinitionId, searchVal, pageNo, pageSize); return returnDataListPaging(result); } /** * delete schedule by id * * @param loginUser login user * @param projectName project name * @param scheduleId scheule id * @return delete result code */ @ApiOperation(value = "deleteScheduleById", notes = "OFFLINE_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "scheduleId", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100") }) @GetMapping(value = "/delete") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_SCHEDULE_CRON_BY_ID_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result deleteScheduleById(@RequestAttribute(value = SESSION_USER) User loginUser, @PathVariable String projectName, @RequestParam("scheduleId") Integer scheduleId ) { Map<String, Object> result = schedulerService.deleteScheduleById(loginUser, projectName, scheduleId); return returnDataList(result); } /** * query schedule list * * @param loginUser login user * @param projectName project name * @return 
schedule list */ @ApiOperation(value = "queryScheduleList", notes = "QUERY_SCHEDULE_LIST_NOTES") @PostMapping("/list") @ApiException(QUERY_SCHEDULE_LIST_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result queryScheduleList(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName) { Map<String, Object> result = schedulerService.queryScheduleList(loginUser, projectName); return returnDataList(result); } /** * preview schedule * * @param loginUser login user * @param projectName project name * @param schedule schedule expression * @return the next five fire time */ @ApiOperation(value = "previewSchedule", notes = "PREVIEW_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}"), }) @PostMapping("/preview") @ResponseStatus(HttpStatus.CREATED) @ApiException(PREVIEW_SCHEDULE_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result previewSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam(value = "schedule") String schedule ) { Map<String, Object> result = schedulerService.previewSchedule(loginUser, projectName, schedule); return returnDataList(result); } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,763
[Feature][JsonSplit-api]schedule online/offline interface
from #5498 Change the request parameter projectName to projectCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5763
https://github.com/apache/dolphinscheduler/pull/5764
cfa22d7c89bcd8e35b8a286b39b67b9b36b3b4dc
e4f427a8d8bf99754698e054845291a5223c2ea6
"2021-07-07T11:37:00Z"
java
"2021-07-08T05:59:40Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.dao.entity.User; import java.util.Map; /** * scheduler service */ public interface SchedulerService { /** * save schedule * * @param loginUser login user * @param projectCode project code * @param processDefineCode process definition code * @param schedule scheduler * @param warningType warning type * @param warningGroupId warning group id * @param failureStrategy failure strategy * @param processInstancePriority process instance priority * @param workerGroup worker group * @return create result code */ Map<String, Object> insertSchedule(User loginUser, long projectCode, long processDefineCode, String schedule, WarningType warningType, int warningGroupId, FailureStrategy failureStrategy, Priority processInstancePriority, String workerGroup); /** * updateProcessInstance schedule * * @param loginUser login user * @param projectCode project code * @param id scheduler id * @param 
scheduleExpression scheduler * @param warningType warning type * @param warningGroupId warning group id * @param failureStrategy failure strategy * @param workerGroup worker group * @param processInstancePriority process instance priority * @return update result code */ Map<String, Object> updateSchedule(User loginUser, long projectCode, Integer id, String scheduleExpression, WarningType warningType, int warningGroupId, FailureStrategy failureStrategy, Priority processInstancePriority, String workerGroup); /** * set schedule online or offline * * @param loginUser login user * @param projectName project name * @param id scheduler id * @param scheduleStatus schedule status * @return publish result code */ Map<String, Object> setScheduleState(User loginUser, String projectName, Integer id, ReleaseState scheduleStatus); /** * query schedule * * @param loginUser login user * @param projectName project name * @param processDefineId process definition id * @param pageNo page number * @param pageSize page size * @param searchVal search value * @return schedule list page */ Map<String, Object> querySchedule(User loginUser, String projectName, Integer processDefineId, String searchVal, Integer pageNo, Integer pageSize); /** * query schedule list * * @param loginUser login user * @param projectName project name * @return schedule list */ Map<String, Object> queryScheduleList(User loginUser, String projectName); /** * delete schedule * * @param projectId project id * @param scheduleId schedule id * @throws RuntimeException runtime exception */ void deleteSchedule(int projectId, int scheduleId); /** * delete schedule by id * * @param loginUser login user * @param projectName project name * @param scheduleId scheule id * @return delete result code */ Map<String, Object> deleteScheduleById(User loginUser, String projectName, Integer scheduleId); /** * preview schedule * * @param loginUser login user * @param projectName project name * @param schedule schedule expression * @return 
the next five fire time */ Map<String, Object> previewSchedule(User loginUser, String projectName, String schedule); }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,763
[Feature][JsonSplit-api]schedule online/offline interface
from #5498 Change the request parameter projectName to projectCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5763
https://github.com/apache/dolphinscheduler/pull/5764
cfa22d7c89bcd8e35b8a286b39b67b9b36b3b4dc
e4f427a8d8bf99754698e054845291a5223c2ea6
"2021-07-07T11:37:00Z"
java
"2021-07-08T05:59:40Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service.impl; import org.apache.dolphinscheduler.api.dto.ScheduleParam; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ServiceException; import org.apache.dolphinscheduler.api.service.ExecutorService; import org.apache.dolphinscheduler.api.service.MonitorService; import org.apache.dolphinscheduler.api.service.ProjectService; import org.apache.dolphinscheduler.api.service.SchedulerService; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.model.Server; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import 
org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.quartz.ProcessScheduleJob; import org.apache.dolphinscheduler.service.quartz.QuartzExecutors; import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; import java.text.ParseException; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import org.quartz.CronExpression; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; /** * scheduler service impl */ @Service public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerService { private static final Logger logger = LoggerFactory.getLogger(SchedulerServiceImpl.class); @Autowired private ProjectService projectService; @Autowired private ExecutorService executorService; @Autowired private MonitorService monitorService; @Autowired private ProcessService processService; @Autowired private ScheduleMapper scheduleMapper; @Autowired private ProjectMapper projectMapper; @Autowired private ProcessDefinitionMapper processDefinitionMapper; /** * save schedule * * @param loginUser login user * @param projectCode project name * @param processDefineCode process definition code * @param schedule scheduler * @param warningType warning type * @param warningGroupId warning group id * @param 
failureStrategy failure strategy * @param processInstancePriority process instance priority * @param workerGroup worker group * @return create result code */ @Override @Transactional(rollbackFor = RuntimeException.class) public Map<String, Object> insertSchedule(User loginUser, long projectCode, long processDefineCode, String schedule, WarningType warningType, int warningGroupId, FailureStrategy failureStrategy, Priority processInstancePriority, String workerGroup) { Map<String, Object> result = new HashMap<>(); Project project = projectMapper.queryByCode(projectCode); // check project auth boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result); if (!hasProjectAndPerm) { return result; } // check work flow define release state ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefineCode); result = executorService.checkProcessDefinitionValid(processDefinition, processDefineCode); if (result.get(Constants.STATUS) != Status.SUCCESS) { return result; } Schedule scheduleObj = new Schedule(); Date now = new Date(); scheduleObj.setProjectName(project.getName()); scheduleObj.setProcessDefinitionId(processDefinition.getId()); scheduleObj.setProcessDefinitionName(processDefinition.getName()); ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class); if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) { logger.warn("The start time must not be the same as the end"); putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME); return result; } scheduleObj.setStartTime(scheduleParam.getStartTime()); scheduleObj.setEndTime(scheduleParam.getEndTime()); if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) { logger.error("{} verify failure", scheduleParam.getCrontab()); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleParam.getCrontab()); return result; } scheduleObj.setCrontab(scheduleParam.getCrontab()); 
scheduleObj.setTimezoneId(scheduleParam.getTimezoneId()); scheduleObj.setWarningType(warningType); scheduleObj.setWarningGroupId(warningGroupId); scheduleObj.setFailureStrategy(failureStrategy); scheduleObj.setCreateTime(now); scheduleObj.setUpdateTime(now); scheduleObj.setUserId(loginUser.getId()); scheduleObj.setUserName(loginUser.getUserName()); scheduleObj.setReleaseState(ReleaseState.OFFLINE); scheduleObj.setProcessInstancePriority(processInstancePriority); scheduleObj.setWorkerGroup(workerGroup); scheduleMapper.insert(scheduleObj); /** * updateProcessInstance receivers and cc by process definition id */ processDefinition.setWarningGroupId(warningGroupId); processDefinitionMapper.updateById(processDefinition); // return scheduler object with ID result.put(Constants.DATA_LIST, scheduleMapper.selectById(scheduleObj.getId())); putMsg(result, Status.SUCCESS); result.put("scheduleId", scheduleObj.getId()); return result; } /** * updateProcessInstance schedule * * @param loginUser login user * @param projectCode project code * @param id scheduler id * @param scheduleExpression scheduler * @param warningType warning type * @param warningGroupId warning group id * @param failureStrategy failure strategy * @param workerGroup worker group * @param processInstancePriority process instance priority * @param scheduleStatus schedule status * @return update result code */ @Override @Transactional(rollbackFor = RuntimeException.class) public Map<String, Object> updateSchedule(User loginUser, long projectCode, Integer id, String scheduleExpression, WarningType warningType, int warningGroupId, FailureStrategy failureStrategy, Priority processInstancePriority, String workerGroup) { Map<String, Object> result = new HashMap<>(); Project project = projectMapper.queryByCode(projectCode); // check project auth boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result); if (!hasProjectAndPerm) { return result; } // check schedule exists Schedule schedule 
= scheduleMapper.selectById(id); if (schedule == null) { putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id); return result; } ProcessDefinition processDefinition = processService.findProcessDefineById(schedule.getProcessDefinitionId()); if (processDefinition == null) { putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, schedule.getProcessDefinitionId()); return result; } /** * scheduling on-line status forbid modification */ if (checkValid(result, schedule.getReleaseState() == ReleaseState.ONLINE, Status.SCHEDULE_CRON_ONLINE_FORBID_UPDATE)) { return result; } Date now = new Date(); // updateProcessInstance param if (StringUtils.isNotEmpty(scheduleExpression)) { ScheduleParam scheduleParam = JSONUtils.parseObject(scheduleExpression, ScheduleParam.class); if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) { logger.warn("The start time must not be the same as the end"); putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME); return result; } schedule.setStartTime(scheduleParam.getStartTime()); schedule.setEndTime(scheduleParam.getEndTime()); if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) { putMsg(result, Status.SCHEDULE_CRON_CHECK_FAILED, scheduleParam.getCrontab()); return result; } schedule.setCrontab(scheduleParam.getCrontab()); schedule.setTimezoneId(scheduleParam.getTimezoneId()); } if (warningType != null) { schedule.setWarningType(warningType); } schedule.setWarningGroupId(warningGroupId); if (failureStrategy != null) { schedule.setFailureStrategy(failureStrategy); } schedule.setWorkerGroup(workerGroup); schedule.setUpdateTime(now); schedule.setProcessInstancePriority(processInstancePriority); scheduleMapper.updateById(schedule); /** * updateProcessInstance recipients and cc by process definition ID */ processDefinition.setWarningGroupId(warningGroupId); processDefinitionMapper.updateById(processDefinition); putMsg(result, Status.SUCCESS); return result; } /** * set schedule online or offline * 
* @param loginUser login user * @param projectName project name * @param id scheduler id * @param scheduleStatus schedule status * @return publish result code */ @Override @Transactional(rollbackFor = RuntimeException.class) public Map<String, Object> setScheduleState(User loginUser, String projectName, Integer id, ReleaseState scheduleStatus) { Map<String, Object> result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); // check project auth boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result); if (!hasProjectAndPerm) { return result; } // check schedule exists Schedule scheduleObj = scheduleMapper.selectById(id); if (scheduleObj == null) { putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id); return result; } // check schedule release state if (scheduleObj.getReleaseState() == scheduleStatus) { logger.info("schedule release is already {},needn't to change schedule id: {} from {} to {}", scheduleObj.getReleaseState(), scheduleObj.getId(), scheduleObj.getReleaseState(), scheduleStatus); putMsg(result, Status.SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE, scheduleStatus); return result; } ProcessDefinition processDefinition = processService.findProcessDefineById(scheduleObj.getProcessDefinitionId()); if (processDefinition == null) { putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, scheduleObj.getProcessDefinitionId()); return result; } if (scheduleStatus == ReleaseState.ONLINE) { // check process definition release state if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { logger.info("not release process definition id: {} , name : {}", processDefinition.getId(), processDefinition.getName()); putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName()); return result; } // check sub process definition release state List<Integer> subProcessDefineIds = new ArrayList<>(); processService.recurseFindSubProcessId(scheduleObj.getProcessDefinitionId(), subProcessDefineIds); Integer[] 
idArray = subProcessDefineIds.toArray(new Integer[subProcessDefineIds.size()]); if (!subProcessDefineIds.isEmpty()) { List<ProcessDefinition> subProcessDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(idArray); if (subProcessDefinitionList != null && !subProcessDefinitionList.isEmpty()) { for (ProcessDefinition subProcessDefinition : subProcessDefinitionList) { /** * if there is no online process, exit directly */ if (subProcessDefinition.getReleaseState() != ReleaseState.ONLINE) { logger.info("not release process definition id: {} , name : {}", subProcessDefinition.getId(), subProcessDefinition.getName()); putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, subProcessDefinition.getId()); return result; } } } } } // check master server exists List<Server> masterServers = monitorService.getServerListFromRegistry(true); if (masterServers.isEmpty()) { putMsg(result, Status.MASTER_NOT_EXISTS); return result; } // set status scheduleObj.setReleaseState(scheduleStatus); scheduleMapper.updateById(scheduleObj); try { switch (scheduleStatus) { case ONLINE: logger.info("Call master client set schedule online, project id: {}, flow id: {},host: {}", project.getId(), processDefinition.getId(), masterServers); setSchedule(project.getId(), scheduleObj); break; case OFFLINE: logger.info("Call master client set schedule offline, project id: {}, flow id: {},host: {}", project.getId(), processDefinition.getId(), masterServers); deleteSchedule(project.getId(), id); break; default: putMsg(result, Status.SCHEDULE_STATUS_UNKNOWN, scheduleStatus.toString()); return result; } } catch (Exception e) { result.put(Constants.MSG, scheduleStatus == ReleaseState.ONLINE ? 
"set online failure" : "set offline failure"); throw new ServiceException(result.get(Constants.MSG).toString()); } putMsg(result, Status.SUCCESS); return result; } /** * query schedule * * @param loginUser login user * @param projectName project name * @param processDefineId process definition id * @param pageNo page number * @param pageSize page size * @param searchVal search value * @return schedule list page */ @Override public Map<String, Object> querySchedule(User loginUser, String projectName, Integer processDefineId, String searchVal, Integer pageNo, Integer pageSize) { HashMap<String, Object> result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); // check project auth boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result); if (!hasProjectAndPerm) { return result; } ProcessDefinition processDefinition = processService.findProcessDefineById(processDefineId); if (processDefinition == null) { putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineId); return result; } Page<Schedule> page = new Page<>(pageNo, pageSize); IPage<Schedule> scheduleIPage = scheduleMapper.queryByProcessDefineIdPaging( page, processDefineId, searchVal ); PageInfo<Schedule> pageInfo = new PageInfo<>(pageNo, pageSize); pageInfo.setTotalCount((int) scheduleIPage.getTotal()); pageInfo.setLists(scheduleIPage.getRecords()); result.put(Constants.DATA_LIST, pageInfo); putMsg(result, Status.SUCCESS); return result; } /** * query schedule list * * @param loginUser login user * @param projectName project name * @return schedule list */ @Override public Map<String, Object> queryScheduleList(User loginUser, String projectName) { Map<String, Object> result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); // check project auth boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result); if (!hasProjectAndPerm) { return result; } List<Schedule> schedules = 
scheduleMapper.querySchedulerListByProjectName(projectName); result.put(Constants.DATA_LIST, schedules); putMsg(result, Status.SUCCESS); return result; } public void setSchedule(int projectId, Schedule schedule) { logger.info("set schedule, project id: {}, scheduleId: {}", projectId, schedule.getId()); QuartzExecutors.getInstance().addJob(ProcessScheduleJob.class, projectId, schedule); } /** * delete schedule * * @param projectId project id * @param scheduleId schedule id * @throws RuntimeException runtime exception */ @Override public void deleteSchedule(int projectId, int scheduleId) { logger.info("delete schedules of project id:{}, schedule id:{}", projectId, scheduleId); String jobName = QuartzExecutors.buildJobName(scheduleId); String jobGroupName = QuartzExecutors.buildJobGroupName(projectId); if (!QuartzExecutors.getInstance().deleteJob(jobName, jobGroupName)) { logger.warn("set offline failure:projectId:{},scheduleId:{}", projectId, scheduleId); throw new ServiceException("set offline failure"); } } /** * check valid * * @param result result * @param bool bool * @param status status * @return check result code */ private boolean checkValid(Map<String, Object> result, boolean bool, Status status) { // timeout is valid if (bool) { putMsg(result, status); return true; } return false; } /** * delete schedule by id * * @param loginUser login user * @param projectName project name * @param scheduleId scheule id * @return delete result code */ @Override public Map<String, Object> deleteScheduleById(User loginUser, String projectName, Integer scheduleId) { Map<String, Object> result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); Status resultEnum = (Status) checkResult.get(Constants.STATUS); if (resultEnum != Status.SUCCESS) { return checkResult; } Schedule schedule = scheduleMapper.selectById(scheduleId); if (schedule == null) { 
putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, scheduleId); return result; } // Determine if the login user is the owner of the schedule if (loginUser.getId() != schedule.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER) { putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } // check schedule is already online if (schedule.getReleaseState() == ReleaseState.ONLINE) { putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, schedule.getId()); return result; } int delete = scheduleMapper.deleteById(scheduleId); if (delete > 0) { putMsg(result, Status.SUCCESS); } else { putMsg(result, Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR); } return result; } /** * preview schedule * * @param loginUser login user * @param projectName project name * @param schedule schedule expression * @return the next five fire time */ @Override public Map<String, Object> previewSchedule(User loginUser, String projectName, String schedule) { Map<String, Object> result = new HashMap<>(); CronExpression cronExpression; ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class); Date now = new Date(); Date startTime = now.after(scheduleParam.getStartTime()) ? now : scheduleParam.getStartTime(); Date endTime = scheduleParam.getEndTime(); try { cronExpression = CronUtils.parse2CronExpression(scheduleParam.getCrontab()); } catch (ParseException e) { logger.error(e.getMessage(), e); putMsg(result, Status.PARSE_TO_CRON_EXPRESSION_ERROR); return result; } List<Date> selfFireDateList = CronUtils.getSelfFireDateList(startTime, endTime, cronExpression, Constants.PREVIEW_SCHEDULE_EXECUTE_COUNT); result.put(Constants.DATA_LIST, selfFireDateList.stream().map(DateUtils::dateToString)); putMsg(result, Status.SUCCESS); return result; } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,763
[Feature][JsonSplit-api]schedule online/offline interface
from #5498 Change the request parameter projectName to projectCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5763
https://github.com/apache/dolphinscheduler/pull/5764
cfa22d7c89bcd8e35b8a286b39b67b9b36b3b4dc
e4f427a8d8bf99754698e054845291a5223c2ea6
"2021-07-07T11:37:00Z"
java
"2021-07-08T05:59:40Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.controller; import static org.mockito.ArgumentMatchers.isA; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.SchedulerService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.User; import org.junit.Assert; import org.junit.Test; import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.boot.test.mock.mockito.MockBean; import org.springframework.http.MediaType; import org.springframework.test.web.servlet.MvcResult; import 
org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; /** * scheduler controller test */ public class SchedulerControllerTest extends AbstractControllerTest { private static Logger logger = LoggerFactory.getLogger(SchedulerControllerTest.class); @MockBean private SchedulerService schedulerService; @Test public void testCreateSchedule() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("processDefinitionCode","40"); paramsMap.add("schedule","{'startTime':'2019-12-16 00:00:00','endTime':'2019-12-17 00:00:00','crontab':'0 0 6 * * ? *'}"); paramsMap.add("warningType",String.valueOf(WarningType.NONE)); paramsMap.add("warningGroupId","1"); paramsMap.add("failureStrategy",String.valueOf(FailureStrategy.CONTINUE)); paramsMap.add("receivers",""); paramsMap.add("receiversCc",""); paramsMap.add("workerGroupId","1"); paramsMap.add("processInstancePriority",String.valueOf(Priority.HIGH)); Mockito.when(schedulerService.insertSchedule(isA(User.class), isA(Long.class), isA(Long.class), isA(String.class), isA(WarningType.class), isA(int.class), isA(FailureStrategy.class), isA(Priority.class), isA(String.class))).thenReturn(successResult()); MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/schedule/create",123) .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isCreated()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testUpdateSchedule() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("id","37"); paramsMap.add("schedule","{'startTime':'2019-12-16 00:00:00','endTime':'2019-12-17 00:00:00','crontab':'0 0 7 * * ? 
*'}"); paramsMap.add("warningType",String.valueOf(WarningType.NONE)); paramsMap.add("warningGroupId","1"); paramsMap.add("failureStrategy",String.valueOf(FailureStrategy.CONTINUE)); paramsMap.add("receivers",""); paramsMap.add("receiversCc",""); paramsMap.add("workerGroupId","1"); paramsMap.add("processInstancePriority",String.valueOf(Priority.HIGH)); Mockito.when(schedulerService.updateSchedule(isA(User.class), isA(Long.class), isA(Integer.class), isA(String.class), isA(WarningType.class), isA(Integer.class), isA(FailureStrategy.class), isA(Priority.class), isA(String.class))).thenReturn(successResult()); MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/schedule/update",123) .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testOnline() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("id","37"); MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/schedule/online","cxc_1113") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testOffline() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("id","28"); MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/schedule/offline","cxc_1113") 
.header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testQueryScheduleListPaging() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("processDefinitionId","40"); paramsMap.add("searchVal","test"); paramsMap.add("pageNo","1"); paramsMap.add("pageSize","30"); MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/schedule/list-paging","cxc_1113") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testQueryScheduleList() throws Exception { MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/schedule/list","cxc_1113") .header(SESSION_ID, sessionId)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testPreviewSchedule() throws Exception { MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/schedule/preview","cxc_1113") .header(SESSION_ID, sessionId) .param("schedule","{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? 
*'}")) .andExpect(status().isCreated()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testDeleteScheduleById() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("scheduleId","37"); MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/schedule/delete","cxc_1113") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,763
[Feature][JsonSplit-api]schedule online/offline interface
from #5498 Change the request parameter projectName to projectCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5763
https://github.com/apache/dolphinscheduler/pull/5764
cfa22d7c89bcd8e35b8a286b39b67b9b36b3b4dc
e4f427a8d8bf99754698e054845291a5223c2ea6
"2021-07-07T11:37:00Z"
java
"2021-07-08T05:59:40Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl; import org.apache.dolphinscheduler.api.service.impl.SchedulerServiceImpl; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.model.Server; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.quartz.QuartzExecutors; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import 
org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; /** * scheduler service test */ @RunWith(PowerMockRunner.class) @PrepareForTest(QuartzExecutors.class) public class SchedulerServiceTest { @InjectMocks private SchedulerServiceImpl schedulerService; @Mock private MonitorService monitorService; @Mock private ProcessService processService; @Mock private ScheduleMapper scheduleMapper; @Mock private ProjectMapper projectMapper; @Mock private ProjectServiceImpl projectService; @Mock private QuartzExecutors quartzExecutors; @Before public void setUp() { quartzExecutors = PowerMockito.mock(QuartzExecutors.class); PowerMockito.mockStatic(QuartzExecutors.class); try { PowerMockito.doReturn(quartzExecutors).when(QuartzExecutors.class, "getInstance"); } catch (Exception e) { e.printStackTrace(); } } @Test public void testSetScheduleState() { String projectName = "test"; User loginUser = new User(); loginUser.setId(1); Map<String, Object> result = new HashMap<String, Object>(); Project project = getProject(projectName); ProcessDefinition processDefinition = new ProcessDefinition(); Schedule schedule = new Schedule(); schedule.setId(1); schedule.setProcessDefinitionId(1); schedule.setReleaseState(ReleaseState.OFFLINE); List<Server> masterServers = new ArrayList<>(); masterServers.add(new Server()); Mockito.when(scheduleMapper.selectById(1)).thenReturn(schedule); Mockito.when(projectMapper.queryByName(projectName)).thenReturn(project); Mockito.when(processService.findProcessDefineById(1)).thenReturn(processDefinition); //hash no auth result = schedulerService.setScheduleState(loginUser, projectName, 1, ReleaseState.ONLINE); Mockito.when(projectService.hasProjectAndPerm(loginUser, project, result)).thenReturn(true); //schedule not exists result = schedulerService.setScheduleState(loginUser, projectName, 2, ReleaseState.ONLINE); 
Assert.assertEquals(Status.SCHEDULE_CRON_NOT_EXISTS, result.get(Constants.STATUS)); //SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE result = schedulerService.setScheduleState(loginUser, projectName, 1, ReleaseState.OFFLINE); Assert.assertEquals(Status.SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE, result.get(Constants.STATUS)); //PROCESS_DEFINE_NOT_EXIST schedule.setProcessDefinitionId(2); result = schedulerService.setScheduleState(loginUser, projectName, 1, ReleaseState.ONLINE); Assert.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST, result.get(Constants.STATUS)); schedule.setProcessDefinitionId(1); // PROCESS_DEFINE_NOT_RELEASE result = schedulerService.setScheduleState(loginUser, projectName, 1, ReleaseState.ONLINE); Assert.assertEquals(Status.PROCESS_DEFINE_NOT_RELEASE, result.get(Constants.STATUS)); processDefinition.setReleaseState(ReleaseState.ONLINE); Mockito.when(processService.findProcessDefineById(1)).thenReturn(processDefinition); //MASTER_NOT_EXISTS result = schedulerService.setScheduleState(loginUser, projectName, 1, ReleaseState.ONLINE); Assert.assertEquals(Status.MASTER_NOT_EXISTS, result.get(Constants.STATUS)); //set master Mockito.when(monitorService.getServerListFromRegistry(true)).thenReturn(masterServers); //SUCCESS result = schedulerService.setScheduleState(loginUser, projectName, 1, ReleaseState.ONLINE); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); //OFFLINE Mockito.when(quartzExecutors.deleteJob(null, null)).thenReturn(true); result = schedulerService.setScheduleState(loginUser, projectName, 1, ReleaseState.OFFLINE); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } @Test public void testDeleteSchedule() { Mockito.when(quartzExecutors.deleteJob("1", "1")).thenReturn(true); Mockito.when(quartzExecutors.buildJobGroupName(1)).thenReturn("1"); Mockito.when(quartzExecutors.buildJobName(1)).thenReturn("1"); boolean flag = true; try { schedulerService.deleteSchedule(1, 1); } catch (Exception e) { flag = false; } 
Assert.assertTrue(flag); } private Project getProject(String name) { Project project = new Project(); project.setName(name); project.setUserId(1); return project; } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,763
[Feature][JsonSplit-api]schedule online/offline interface
from #5498 Change the request parameter projectName to projectCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5763
https://github.com/apache/dolphinscheduler/pull/5764
cfa22d7c89bcd8e35b8a286b39b67b9b36b3b4dc
e4f427a8d8bf99754698e054845291a5223c2ea6
"2021-07-07T11:37:00Z"
java
"2021-07-08T05:59:40Z"
dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import _ from 'lodash' import io from '@/module/io' import { tasksState } from '@/conf/home/pages/dag/_source/config' // delete 'definitionList' from tasks const deleteDefinitionList = (tasks) => { const newTasks = [] tasks.forEach(item => { const newItem = Object.assign({}, item) if (newItem.dependence && newItem.dependence.dependTaskList) { newItem.dependence.dependTaskList.forEach(dependTaskItem => { if (dependTaskItem.dependItemList) { dependTaskItem.dependItemList.forEach(dependItem => { Reflect.deleteProperty(dependItem, 'definitionList') }) } }) } newTasks.push(newItem) }) return newTasks } export default { /** * Task status acquisition */ getTaskState ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/task-list-by-process-id`, { processInstanceId: payload }, res => { const arr = _.map(res.data.taskList, v => { return _.cloneDeep(_.assign(tasksState[v.state], { name: v.name, stateId: v.id, dependentResult: v.dependentResult })) }) resolve({ list: arr, processInstanceState: res.data.processInstanceState, taskList: res.data.taskList }) }).catch(e => { reject(e) }) }) }, /** * Update process definition status */ editProcessState ({ 
state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/process/release`, { processId: payload.processId, releaseState: payload.releaseState }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * get process definition versions pagination info */ getProcessDefinitionVersionsPage ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/versions`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * switch process definition version */ switchProcessDefinitionVersion ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/version/switch`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * delete process definition version */ deleteProcessDefinitionVersion ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/version/delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Update process instance status */ editExecutorsState ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/executors/execute`, { processInstanceId: payload.processInstanceId, executeType: payload.executeType }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Verify that the DGA map name exists */ verifDAGName ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/verify-name`, { name: payload }, res => { state.name = payload resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get process definition DAG diagram details */ getProcessDetails ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/select-by-id`, { processId: payload }, res => { // process definition code state.code = res.data.code // version state.version = res.data.version // name 
state.name = res.data.name // description state.description = res.data.description // connects state.connects = JSON.parse(res.data.connects) // locations state.locations = JSON.parse(res.data.locations) // Process definition const processDefinitionJson = JSON.parse(res.data.processDefinitionJson) // tasks info state.tasks = processDefinitionJson.tasks // tasks cache state.cacheTasks = {} processDefinitionJson.tasks.forEach(v => { state.cacheTasks[v.id] = v }) // global params state.globalParams = processDefinitionJson.globalParams // timeout state.timeout = processDefinitionJson.timeout state.tenantId = processDefinitionJson.tenantId resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get process definition DAG diagram details */ copyProcess ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/process/copy`, { processDefinitionIds: payload.processDefinitionIds, targetProjectId: payload.targetProjectId }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get process definition DAG diagram details */ moveProcess ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/process/move`, { processDefinitionIds: payload.processDefinitionIds, targetProjectId: payload.targetProjectId }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get all the items created by the logged in user */ getAllItems ({ state }, payload) { return new Promise((resolve, reject) => { io.get('projects/created-and-authorized-project', {}, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get the process instance DAG diagram details */ getInstancedetail ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/select-by-id`, { processInstanceId: payload }, res => { // code state.code = res.data.processDefinitionCode // version state.version = res.data.processDefinitionVersion // name state.name = 
res.data.name // desc state.description = res.data.description // connects state.connects = JSON.parse(res.data.connects) // locations state.locations = JSON.parse(res.data.locations) // process instance const processInstanceJson = JSON.parse(res.data.processInstanceJson) // tasks info state.tasks = processInstanceJson.tasks // tasks cache state.cacheTasks = {} processInstanceJson.tasks.forEach(v => { state.cacheTasks[v.id] = v }) // global params state.globalParams = processInstanceJson.globalParams // timeout state.timeout = processInstanceJson.timeout state.tenantId = processInstanceJson.tenantId // startup parameters state.startup = _.assign(state.startup, _.pick(res.data, ['commandType', 'failureStrategy', 'processInstancePriority', 'workerGroup', 'warningType', 'warningGroupId', 'receivers', 'receiversCc'])) state.startup.commandParam = JSON.parse(res.data.commandParam) resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Create process definition */ saveDAGchart ({ state }, payload) { return new Promise((resolve, reject) => { const data = { globalParams: state.globalParams, tasks: deleteDefinitionList(state.tasks), tenantId: state.tenantId, timeout: state.timeout } io.post(`projects/${state.projectName}/process/save`, { processDefinitionJson: JSON.stringify(data), name: _.trim(state.name), description: _.trim(state.description), locations: JSON.stringify(state.locations), connects: JSON.stringify(state.connects) }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Process definition update */ updateDefinition ({ state }, payload) { return new Promise((resolve, reject) => { const data = { globalParams: state.globalParams, tasks: deleteDefinitionList(state.tasks), tenantId: state.tenantId, timeout: state.timeout } io.post(`projects/${state.projectName}/process/update`, { processDefinitionJson: JSON.stringify(data), locations: JSON.stringify(state.locations), connects: JSON.stringify(state.connects), name: _.trim(state.name), description: 
_.trim(state.description), id: payload, releaseState: state.releaseState }, res => { resolve(res) state.isEditDag = false }).catch(e => { reject(e) }) }) }, /** * Process instance update */ updateInstance ({ state }, payload) { return new Promise((resolve, reject) => { const data = { globalParams: state.globalParams, tasks: state.tasks, tenantId: state.tenantId, timeout: state.timeout } io.post(`projects/${state.projectName}/instance/update`, { processInstanceJson: JSON.stringify(data), locations: JSON.stringify(state.locations), connects: JSON.stringify(state.connects), processInstanceId: payload, syncDefine: state.syncDefine }, res => { resolve(res) state.isEditDag = false }).catch(e => { reject(e) }) }) }, /** * Get a list of process definitions (sub-workflow usage is not paged) */ getProcessList ({ state }, payload) { return new Promise((resolve, reject) => { if (state.processListS.length) { resolve() return } io.get(`projects/${state.projectName}/process/list`, payload, res => { state.processListS = res.data resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get a list of process definitions (list page usage with pagination) */ getProcessListP ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/list-paging`, payload, res => { resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get a list of project */ getProjectList ({ state }, payload) { return new Promise((resolve, reject) => { if (state.projectListS.length) { resolve() return } io.get('projects/query-project-list', payload, res => { state.projectListS = res.data resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get a list of process definitions by project id */ getProcessByProjectId ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/queryProcessDefinitionAllByProjectId`, payload, res => { resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * get 
datasource */ getDatasourceList ({ state }, payload) { return new Promise((resolve, reject) => { io.get('datasources/list', { type: payload }, res => { resolve(res) }).catch(res => { reject(res) }) }) }, /** * get resources */ getResourcesList ({ state }) { return new Promise((resolve, reject) => { if (state.resourcesListS.length) { resolve() return } io.get('resources/list', { type: 'FILE' }, res => { state.resourcesListS = res.data resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * get jar */ getResourcesListJar ({ state }) { return new Promise((resolve, reject) => { if (state.resourcesListJar.length) { resolve() return } io.get('resources/list/jar', { type: 'FILE' }, res => { state.resourcesListJar = res.data resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get process instance */ getProcessInstance ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/list-paging`, payload, res => { state.instanceListS = res.data.totalList resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get alarm list */ getNotifyGroupList ({ state }, payload) { return new Promise((resolve, reject) => { io.get('alert-group/list', res => { state.notifyGroupListS = _.map(res.data, v => { return { id: v.id, code: v.groupName, disabled: false } }) resolve(_.cloneDeep(state.notifyGroupListS)) }).catch(res => { reject(res) }) }) }, /** * Process definition startup interface */ processStart ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/executors/start-process-instance`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * View log */ getLog ({ state }, payload) { return new Promise((resolve, reject) => { io.get('log/detail', payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get the process instance id according to the process definition id * @param taskId */ getSubProcessId ({ state }, payload) { return new 
Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/select-sub-process`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Called before the process definition starts */ getStartCheck ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/executors/start-check`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Create timing */ createSchedule ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectCode}/schedule/create`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Preview timing */ previewSchedule ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/schedule/preview`, payload, res => { resolve(res.data) // alert(res.data) }).catch(e => { reject(e) }) }) }, /** * Timing list paging */ getScheduleList ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/schedule/list-paging`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Timing online */ scheduleOffline ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/schedule/offline`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Timed offline */ scheduleOnline ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/schedule/online`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Edit timing */ updateSchedule ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectCode}/schedule/update`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Delete process instance */ deleteInstance ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/delete`, payload, 
res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Batch delete process instance */ batchDeleteInstance ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/batch-delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Delete definition */ deleteDefinition ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Batch delete definition */ batchDeleteDefinition ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/batch-delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * export definition */ exportDefinition ({ state }, payload) { const downloadBlob = (data, fileNameS = 'json') => { if (!data) { return } const blob = new Blob([data]) const fileName = `${fileNameS}.json` if ('download' in document.createElement('a')) { // 不是IE浏览器 const url = window.URL.createObjectURL(blob) const link = document.createElement('a') link.style.display = 'none' link.href = url link.setAttribute('download', fileName) document.body.appendChild(link) link.click() document.body.removeChild(link) // 下载完成移除元素 window.URL.revokeObjectURL(url) // 释放掉blob对象 } else { // IE 10+ window.navigator.msSaveBlob(blob, fileName) } } io.get(`projects/${state.projectName}/process/export`, { processDefinitionIds: payload.processDefinitionIds }, res => { downloadBlob(res, payload.fileName) }, e => { }, { responseType: 'blob' }) }, /** * Process instance get variable */ getViewvariables ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/view-variables`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get udfs function based on data source */ getUdfList ({ state }, payload) { return new Promise((resolve, reject) => { 
io.get('resources/udf-func/list', payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Query task instance list */ getTaskInstanceList ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/task-instance/list-paging`, payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * Force fail/kill/need_fault_tolerance task success */ forceTaskSuccess ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/task-instance/force-success`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Query task record list */ getTaskRecordList ({ state }, payload) { return new Promise((resolve, reject) => { io.get('projects/task-record/list-paging', payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * Query history task record list */ getHistoryTaskRecordList ({ state }, payload) { return new Promise((resolve, reject) => { io.get('projects/task-record/history-list-paging', payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * tree chart */ getViewTree ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/view-tree`, payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * gantt chart */ getViewGantt ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/view-gantt`, payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * Query task node list */ getProcessTasksList ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/gen-task-list`, payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, getTaskListDefIdAll ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/get-task-list`, payload, res => { resolve(res.data) 
}).catch(e => { reject(e) }) }) }, /** * remove timing */ deleteTiming ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/schedule/delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, getResourceId ({ state }, payload) { return new Promise((resolve, reject) => { io.get('resources/queryResource', payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,738
[Improvement][UI] The cancel button in the pop-up dialog of `batch copy` and `batch move` doesn't work.
The cancel button in the pop-up dialog of `batch copy` and `batch move` doesn't work. When we click the cancel button, the dialog can't close. ![image](https://user-images.githubusercontent.com/52202080/124266707-e251e900-db69-11eb-9671-d726995ab699.png)
https://github.com/apache/dolphinscheduler/issues/5738
https://github.com/apache/dolphinscheduler/pull/5739
7a18adae5261e37def2ca1ede0320043d18abfab
b114d330ac1fa7de27e09cc73c0804a7536f3b28
"2021-07-02T11:18:26Z"
java
"2021-07-08T10:18:20Z"
dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/relatedItems.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <m-popup ref="popup" :ok-text="$t('Confirm')" :nameText="$t('Related items')" @ok="_ok"> <template slot="content"> <div class="create-tenement-model"> <m-list-box-f> <template slot="name"><strong>*</strong>{{$t('Project Name')}}</template> <template slot="content"> <el-select v-model="itemId" size="small"> <el-option v-for="item in itemList" :key="item.id" :value="item.id" :label="item.name"> </el-option> </el-select> </template> </m-list-box-f> </div> </template> </m-popup> </template> <script> import i18n from '@/module/i18n' import store from '@/conf/home/store' import mPopup from '@/module/components/popup/popup' import mListBoxF from '@/module/components/listBoxF/listBoxF' export default { name: 'create-tenement', data () { return { store, itemList: [], itemId: '' } }, props: { tmp: Boolean }, methods: { _ok () { if (this._verification()) { if (this.tmp) { this.$emit('onBatchMove', this.itemId) } else { this.$emit('onBatchCopy', this.itemId) } } }, _verification () { if (!this.itemId) { this.$message.warning(`${i18n.$t('Project name is required')}`) return false } return true } }, watch: { }, created () { this.store.dispatch('dag/getAllItems', {}).then(res => { if (res.data.length > 
0) { this.itemList = res.data } }) }, mounted () { }, components: { mPopup, mListBoxF } } </script>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,741
[Improvement][Worker] Improve task process status log
**Describe the question** In the AbstractCommandExecutor, after the task process executed, it will log the status. We need to move the log from line 206 to line 227, the `result.getExitStatusCode()` is always 0, because we did not assign value. https://github.com/apache/dolphinscheduler/blob/2ba569acd028c00c22f4853de7c58251ac72816c/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java#L157-L206 **Which version of DolphinScheduler:** -[1.3.6] -[dev]
https://github.com/apache/dolphinscheduler/issues/5741
https://github.com/apache/dolphinscheduler/pull/5776
b114d330ac1fa7de27e09cc73c0804a7536f3b28
ab527a5e5abd04243305a50f184d8009b9edf21a
"2021-07-03T08:43:03Z"
java
"2021-07-08T16:28:00Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.worker.task; import static org.apache.dolphinscheduler.common.Constants.EXIT_CODE_FAILURE; import static org.apache.dolphinscheduler.common.Constants.EXIT_CODE_KILL; import static org.apache.dolphinscheduler.common.Constants.EXIT_CODE_SUCCESS; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.thread.Stopper; import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.dolphinscheduler.common.utils.HadoopUtils; import org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.ProcessUtils; import org.apache.dolphinscheduler.server.worker.cache.TaskExecutionContextCacheManager; import org.apache.dolphinscheduler.server.worker.cache.impl.TaskExecutionContextCacheManagerImpl; import 
org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.lang.reflect.Field; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.slf4j.Logger; /** * abstract command executor */ public abstract class AbstractCommandExecutor { /** * rules for extracting application ID */ protected static final Pattern APPLICATION_REGEX = Pattern.compile(Constants.APPLICATION_REGEX); protected StringBuilder varPool = new StringBuilder(); /** * process */ private Process process; /** * log handler */ protected Consumer<List<String>> logHandler; /** * logger */ protected Logger logger; /** * log list */ protected final List<String> logBuffer; protected boolean logOutputIsScuccess = false; /** * taskExecutionContext */ protected TaskExecutionContext taskExecutionContext; /** * taskExecutionContextCacheManager */ private TaskExecutionContextCacheManager taskExecutionContextCacheManager; public AbstractCommandExecutor(Consumer<List<String>> logHandler, TaskExecutionContext taskExecutionContext, Logger logger) { this.logHandler = logHandler; this.taskExecutionContext = taskExecutionContext; this.logger = logger; this.logBuffer = Collections.synchronizedList(new ArrayList<>()); this.taskExecutionContextCacheManager = SpringApplicationContext.getBean(TaskExecutionContextCacheManagerImpl.class); } protected AbstractCommandExecutor(List<String> logBuffer) { this.logBuffer = logBuffer; } /** * build process * * @param commandFile command file * @throws IOException IO Exception */ private void buildProcess(String commandFile) throws 
IOException { // setting up user to run commands List<String> command = new LinkedList<>(); //init process builder ProcessBuilder processBuilder = new ProcessBuilder(); // setting up a working directory processBuilder.directory(new File(taskExecutionContext.getExecutePath())); // merge error information to standard output stream processBuilder.redirectErrorStream(true); // setting up user to run commands if (!OSUtils.isWindows() && CommonUtils.isSudoEnable()) { command.add("sudo"); command.add("-u"); command.add(taskExecutionContext.getTenantCode()); } command.add(commandInterpreter()); command.addAll(commandOptions()); command.add(commandFile); // setting commands processBuilder.command(command); process = processBuilder.start(); // print command printCommand(command); } /** * task specific execution logic * * @param execCommand execCommand * @return CommandExecuteResult * @throws Exception if error throws Exception */ public CommandExecuteResult run(String execCommand) throws Exception { CommandExecuteResult result = new CommandExecuteResult(); int taskInstanceId = taskExecutionContext.getTaskInstanceId(); // If the task has been killed, then the task in the cache is null if (null == taskExecutionContextCacheManager.getByTaskInstanceId(taskInstanceId)) { result.setExitStatusCode(EXIT_CODE_KILL); return result; } if (StringUtils.isEmpty(execCommand)) { taskExecutionContextCacheManager.removeByTaskInstanceId(taskInstanceId); return result; } String commandFilePath = buildCommandFilePath(); // create command file if not exists createCommandFileIfNotExists(execCommand, commandFilePath); //build process buildProcess(commandFilePath); // parse process output parseProcessOutput(process); Integer processId = getProcessId(process); result.setProcessId(processId); // cache processId taskExecutionContext.setProcessId(processId); boolean updateTaskExecutionContextStatus = taskExecutionContextCacheManager.updateTaskExecutionContext(taskExecutionContext); if 
(Boolean.FALSE.equals(updateTaskExecutionContextStatus)) { ProcessUtils.kill(taskExecutionContext); result.setExitStatusCode(EXIT_CODE_KILL); return result; } // print process id logger.info("process start, process id is: {}", processId); // if timeout occurs, exit directly long remainTime = getRemaintime(); // waiting for the run to finish boolean status = process.waitFor(remainTime, TimeUnit.SECONDS); logger.info("process has exited, execute path:{}, processId:{} ,exitStatusCode:{} ,processWaitForStatus:{} ,processExitValue:{}", taskExecutionContext.getExecutePath(), processId, result.getExitStatusCode(), status, process.exitValue()); // if SHELL task exit if (status) { // set appIds List<String> appIds = getAppIds(taskExecutionContext.getLogPath()); result.setAppIds(String.join(Constants.COMMA, appIds)); // SHELL task state result.setExitStatusCode(process.exitValue()); // if yarn task , yarn state is final state if (process.exitValue() == 0) { result.setExitStatusCode(isSuccessOfYarnState(appIds) ? 
EXIT_CODE_SUCCESS : EXIT_CODE_FAILURE); } } else { logger.error("process has failure , exitStatusCode:{}, processExitValue:{}, ready to kill ...", result.getExitStatusCode(), process.exitValue()); ProcessUtils.kill(taskExecutionContext); result.setExitStatusCode(EXIT_CODE_FAILURE); } return result; } public String getVarPool() { return varPool.toString(); } /** * cancel application * * @throws Exception exception */ public void cancelApplication() throws Exception { if (process == null) { return; } // clear log clear(); int processId = getProcessId(process); logger.info("cancel process: {}", processId); // kill , waiting for completion boolean killed = softKill(processId); if (!killed) { // hard kill hardKill(processId); // destory process.destroy(); process = null; } } /** * soft kill * * @param processId process id * @return process is alive * @throws InterruptedException interrupted exception */ private boolean softKill(int processId) { if (processId != 0 && process.isAlive()) { try { // sudo -u user command to run command String cmd = String.format("kill %d", processId); cmd = OSUtils.getSudoCmd(taskExecutionContext.getTenantCode(), cmd); logger.info("soft kill task:{}, process id:{}, cmd:{}", taskExecutionContext.getTaskAppId(), processId, cmd); Runtime.getRuntime().exec(cmd); } catch (IOException e) { logger.info("kill attempt failed", e); } } return !process.isAlive(); } /** * hard kill * * @param processId process id */ private void hardKill(int processId) { if (processId != 0 && process.isAlive()) { try { String cmd = String.format("kill -9 %d", processId); cmd = OSUtils.getSudoCmd(taskExecutionContext.getTenantCode(), cmd); logger.info("hard kill task:{}, process id:{}, cmd:{}", taskExecutionContext.getTaskAppId(), processId, cmd); Runtime.getRuntime().exec(cmd); } catch (IOException e) { logger.error("kill attempt failed ", e); } } } /** * print command * * @param commands process builder */ private void printCommand(List<String> commands) { String 
cmdStr; try { cmdStr = ProcessUtils.buildCommandStr(commands); logger.info("task run command:\n{}", cmdStr); } catch (Exception e) { logger.error(e.getMessage(), e); } } /** * clear */ private void clear() { List<String> markerList = new ArrayList<>(); markerList.add(ch.qos.logback.classic.ClassicConstants.FINALIZE_SESSION_MARKER.toString()); if (!logBuffer.isEmpty()) { // log handle logHandler.accept(logBuffer); logBuffer.clear(); } logHandler.accept(markerList); } /** * get the standard output of the process * * @param process process */ private void parseProcessOutput(Process process) { String threadLoggerInfoName = String.format(LoggerUtils.TASK_LOGGER_THREAD_NAME + "-%s", taskExecutionContext.getTaskAppId()); ExecutorService getOutputLogService = ThreadUtils.newDaemonSingleThreadExecutor(threadLoggerInfoName + "-" + "getOutputLogService"); getOutputLogService.submit(() -> { BufferedReader inReader = null; try { inReader = new BufferedReader(new InputStreamReader(process.getInputStream())); String line; logBuffer.add("welcome to use bigdata scheduling system..."); while ((line = inReader.readLine()) != null) { if (line.startsWith("${setValue(")) { varPool.append(line.substring("${setValue(".length(), line.length() - 2)); varPool.append("$VarPool$"); } else { logBuffer.add(line); } } } catch (Exception e) { logger.error(e.getMessage(), e); } finally { logOutputIsScuccess = true; close(inReader); } }); getOutputLogService.shutdown(); ExecutorService parseProcessOutputExecutorService = ThreadUtils.newDaemonSingleThreadExecutor(threadLoggerInfoName); parseProcessOutputExecutorService.submit(() -> { try { long lastFlushTime = System.currentTimeMillis(); while (logBuffer.size() > 0 || !logOutputIsScuccess) { if (logBuffer.size() > 0) { lastFlushTime = flush(lastFlushTime); } else { Thread.sleep(Constants.DEFAULT_LOG_FLUSH_INTERVAL); } } } catch (Exception e) { logger.error(e.getMessage(), e); } finally { clear(); } }); parseProcessOutputExecutorService.shutdown(); } 
/** * check yarn state * * @param appIds application id list * @return is success of yarn task state */ public boolean isSuccessOfYarnState(List<String> appIds) { boolean result = true; try { for (String appId : appIds) { logger.info("check yarn application status, appId:{}", appId); while (Stopper.isRunning()) { ExecutionStatus applicationStatus = HadoopUtils.getInstance().getApplicationStatus(appId); if (logger.isDebugEnabled()) { logger.debug("check yarn application status, appId:{}, final state:{}", appId, applicationStatus.name()); } if (applicationStatus.equals(ExecutionStatus.FAILURE) || applicationStatus.equals(ExecutionStatus.KILL)) { return false; } if (applicationStatus.equals(ExecutionStatus.SUCCESS)) { break; } ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); } } } catch (Exception e) { logger.error("yarn applications: {} , query status failed, exception:{}", StringUtils.join(appIds, ","), e); result = false; } return result; } public int getProcessId() { return getProcessId(process); } /** * get app links * * @param logPath log path * @return app id list */ private List<String> getAppIds(String logPath) { List<String> logs = convertFile2List(logPath); List<String> appIds = new ArrayList<>(); /** * analysis log?get submited yarn application id */ for (String log : logs) { String appId = findAppId(log); if (StringUtils.isNotEmpty(appId) && !appIds.contains(appId)) { logger.info("find app id: {}", appId); appIds.add(appId); } } return appIds; } /** * convert file to list * * @param filename file name * @return line list */ private List<String> convertFile2List(String filename) { List lineList = new ArrayList<String>(100); File file = new File(filename); if (!file.exists()) { return lineList; } BufferedReader br = null; try { br = new BufferedReader(new InputStreamReader(new FileInputStream(filename), StandardCharsets.UTF_8)); String line = null; while ((line = br.readLine()) != null) { lineList.add(line); } } catch (Exception e) { 
logger.error(String.format("read file: %s failed : ", filename), e); } finally { if (br != null) { try { br.close(); } catch (IOException e) { logger.error(e.getMessage(), e); } } } return lineList; } /** * find app id * * @param line line * @return appid */ private String findAppId(String line) { Matcher matcher = APPLICATION_REGEX.matcher(line); if (matcher.find()) { return matcher.group(); } return null; } /** * get remain time(s) * * @return remain time */ private long getRemaintime() { long usedTime = (System.currentTimeMillis() - taskExecutionContext.getStartTime().getTime()) / 1000; long remainTime = taskExecutionContext.getTaskTimeout() - usedTime; if (remainTime < 0) { throw new RuntimeException("task execution time out"); } return remainTime; } /** * get process id * * @param process process * @return process id */ private int getProcessId(Process process) { int processId = 0; try { Field f = process.getClass().getDeclaredField(Constants.PID); f.setAccessible(true); processId = f.getInt(process); } catch (Throwable e) { logger.error(e.getMessage(), e); } return processId; } /** * when log buffer siz or flush time reach condition , then flush * * @param lastFlushTime last flush time * @return last flush time */ private long flush(long lastFlushTime) { long now = System.currentTimeMillis(); /** * when log buffer siz or flush time reach condition , then flush */ if (logBuffer.size() >= Constants.DEFAULT_LOG_ROWS_NUM || now - lastFlushTime > Constants.DEFAULT_LOG_FLUSH_INTERVAL) { lastFlushTime = now; /** log handle */ logHandler.accept(logBuffer); logBuffer.clear(); } return lastFlushTime; } /** * close buffer reader * * @param inReader in reader */ private void close(BufferedReader inReader) { if (inReader != null) { try { inReader.close(); } catch (IOException e) { logger.error(e.getMessage(), e); } } } protected List<String> commandOptions() { return Collections.emptyList(); } protected abstract String buildCommandFilePath(); protected abstract String 
commandInterpreter(); protected abstract void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException; }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,510
[Feature][JsonSplit-api]schedule list-paging interface
from #5498 Change the request parameter processDefinitionId to processDefinitionCode, including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5510
https://github.com/apache/dolphinscheduler/pull/5771
e4f427a8d8bf99754698e054845291a5223c2ea6
72535a47e3dafc68c457996ea6e01b8da17685aa
"2021-05-18T13:57:34Z"
java
"2021-07-09T02:13:00Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.controller; import static org.apache.dolphinscheduler.api.enums.Status.CREATE_SCHEDULE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.OFFLINE_SCHEDULE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.PREVIEW_SCHEDULE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.PUBLISH_SCHEDULE_ONLINE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_SCHEDULE_LIST_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_SCHEDULE_LIST_PAGING_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_SCHEDULE_ERROR; import static org.apache.dolphinscheduler.common.Constants.SESSION_USER; import org.apache.dolphinscheduler.api.aspect.AccessLogAnnotation; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.SchedulerService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import 
org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.entity.User; import java.util.Map; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestAttribute; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; import io.swagger.annotations.Api; import io.swagger.annotations.ApiImplicitParam; import io.swagger.annotations.ApiImplicitParams; import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiParam; import springfox.documentation.annotations.ApiIgnore; /** * scheduler controller */ @Api(tags = "SCHEDULER_TAG") @RestController @RequestMapping("/projects/{projectCode}/schedule") public class SchedulerController extends BaseController { public static final String DEFAULT_WARNING_TYPE = "NONE"; public static final String DEFAULT_NOTIFY_GROUP_ID = "1"; public static final String DEFAULT_FAILURE_POLICY = "CONTINUE"; public static final String DEFAULT_PROCESS_INSTANCE_PRIORITY = "MEDIUM"; @Autowired private SchedulerService schedulerService; /** * create schedule * * @param loginUser login user * @param projectCode project code * @param processDefinitionCode process definition code * @param schedule scheduler * @param warningType warning type * @param warningGroupId warning group id * @param 
failureStrategy failure strategy * @param processInstancePriority process instance priority * @param workerGroup worker group * @return create result code */ @ApiOperation(value = "createSchedule", notes = "CREATE_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionCode", value = "PROCESS_DEFINITION_CODE", required = true, dataType = "Long", example = "100"), @ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','timezoneId':'America/Phoenix','crontab':'0 0 3/6 * * ? *'}"), @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type = "WarningType"), @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", dataType = "Int", example = "100"), @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type = "FailureStrategy"), @ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int", example = "100"), @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type = "Priority"), }) @PostMapping("/create") @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_SCHEDULE_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result createSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam(value = "processDefinitionCode") long processDefinitionCode, @RequestParam(value = "schedule") String schedule, @RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType, @RequestParam(value = "warningGroupId", required = false, defaultValue = DEFAULT_NOTIFY_GROUP_ID) int warningGroupId, @RequestParam(value = "failureStrategy", required = false, defaultValue = DEFAULT_FAILURE_POLICY) FailureStrategy failureStrategy, @RequestParam(value = 
"workerGroup", required = false, defaultValue = "default") String workerGroup, @RequestParam(value = "processInstancePriority", required = false, defaultValue = DEFAULT_PROCESS_INSTANCE_PRIORITY) Priority processInstancePriority) { Map<String, Object> result = schedulerService.insertSchedule(loginUser, projectCode, processDefinitionCode, schedule, warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup); return returnDataList(result); } /** * updateProcessInstance schedule * * @param loginUser login user * @param projectCode project code * @param id scheduler id * @param schedule scheduler * @param warningType warning type * @param warningGroupId warning group id * @param failureStrategy failure strategy * @param workerGroup worker group * @param processInstancePriority process instance priority * @return update result code */ @ApiOperation(value = "updateSchedule", notes = "UPDATE_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? 
*'}"), @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type = "WarningType"), @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", dataType = "Int", example = "100"), @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type = "FailureStrategy"), @ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int", example = "100"), @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type = "Priority"), }) @PostMapping("/update") @ApiException(UPDATE_SCHEDULE_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result updateSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam(value = "id") Integer id, @RequestParam(value = "schedule") String schedule, @RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType, @RequestParam(value = "warningGroupId", required = false) int warningGroupId, @RequestParam(value = "failureStrategy", required = false, defaultValue = "END") FailureStrategy failureStrategy, @RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup, @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) { Map<String, Object> result = schedulerService.updateSchedule(loginUser, projectCode, id, schedule, warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup); return returnDataList(result); } /** * publish schedule setScheduleState * * @param loginUser login user * @param projectCode project code * @param id scheduler id * @return publish result code */ @ApiOperation(value = "online", notes = "ONLINE_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", 
example = "100") }) @PostMapping("/online") @ApiException(PUBLISH_SCHEDULE_ONLINE_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result online(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam("id") Integer id) { Map<String, Object> result = schedulerService.setScheduleState(loginUser, projectCode, id, ReleaseState.ONLINE); return returnDataList(result); } /** * offline schedule * * @param loginUser login user * @param projectCode project code * @param id schedule id * @return operation result code */ @ApiOperation(value = "offline", notes = "OFFLINE_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100") }) @PostMapping("/offline") @ApiException(OFFLINE_SCHEDULE_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result offline(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam("id") Integer id) { Map<String, Object> result = schedulerService.setScheduleState(loginUser, projectCode, id, ReleaseState.OFFLINE); return returnDataList(result); } /** * query schedule list paging * * @param loginUser login user * @param projectName project name * @param processDefinitionId process definition id * @param pageNo page number * @param pageSize page size * @param searchVal search value * @return schedule list page */ @ApiOperation(value = "queryScheduleListPaging", notes = "QUERY_SCHEDULE_LIST_PAGING_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"), @ApiImplicitParam(name = "pageNo", value = 
"PAGE_NO", dataType = "Int", example = "100"), @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "100") }) @GetMapping("/list-paging") @ApiException(QUERY_SCHEDULE_LIST_PAGING_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result queryScheduleListPaging(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam Integer processDefinitionId, @RequestParam(value = "searchVal", required = false) String searchVal, @RequestParam("pageNo") Integer pageNo, @RequestParam("pageSize") Integer pageSize) { Map<String, Object> result = checkPageParams(pageNo, pageSize); if (result.get(Constants.STATUS) != Status.SUCCESS) { return returnDataListPaging(result); } searchVal = ParameterUtils.handleEscapes(searchVal); result = schedulerService.querySchedule(loginUser, projectName, processDefinitionId, searchVal, pageNo, pageSize); return returnDataListPaging(result); } /** * delete schedule by id * * @param loginUser login user * @param projectName project name * @param scheduleId scheule id * @return delete result code */ @ApiOperation(value = "deleteScheduleById", notes = "OFFLINE_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "scheduleId", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100") }) @GetMapping(value = "/delete") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_SCHEDULE_CRON_BY_ID_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result deleteScheduleById(@RequestAttribute(value = SESSION_USER) User loginUser, @PathVariable String projectName, @RequestParam("scheduleId") Integer scheduleId ) { Map<String, Object> result = schedulerService.deleteScheduleById(loginUser, projectName, scheduleId); return returnDataList(result); } /** * query schedule list * * @param loginUser login user * @param projectName project name * @return 
schedule list */ @ApiOperation(value = "queryScheduleList", notes = "QUERY_SCHEDULE_LIST_NOTES") @PostMapping("/list") @ApiException(QUERY_SCHEDULE_LIST_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result queryScheduleList(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName) { Map<String, Object> result = schedulerService.queryScheduleList(loginUser, projectName); return returnDataList(result); } /** * preview schedule * * @param loginUser login user * @param projectName project name * @param schedule schedule expression * @return the next five fire time */ @ApiOperation(value = "previewSchedule", notes = "PREVIEW_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}"), }) @PostMapping("/preview") @ResponseStatus(HttpStatus.CREATED) @ApiException(PREVIEW_SCHEDULE_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result previewSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam(value = "schedule") String schedule ) { Map<String, Object> result = schedulerService.previewSchedule(loginUser, projectName, schedule); return returnDataList(result); } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,510
[Feature][JsonSplit-api]schedule list-paging interface
from #5498 Change the request parameter processDefinitionId to processDefinitionCode, including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5510
https://github.com/apache/dolphinscheduler/pull/5771
e4f427a8d8bf99754698e054845291a5223c2ea6
72535a47e3dafc68c457996ea6e01b8da17685aa
"2021-05-18T13:57:34Z"
java
"2021-07-09T02:13:00Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.api.service;

import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.dao.entity.User;

import java.util.Map;

/**
 * Scheduler service: CRUD and state management for workflow schedules.
 *
 * NOTE(review): create/update/setScheduleState already key on projectCode,
 * while the query/delete/preview methods still key on projectName — this
 * interface is mid-migration (issue #5498).
 */
public interface SchedulerService {

    /**
     * Save a new schedule for a process definition.
     *
     * @param loginUser login user
     * @param projectCode project code
     * @param processDefineCode process definition code
     * @param schedule scheduler expression (JSON: startTime/endTime/timezoneId/crontab)
     * @param warningType warning type
     * @param warningGroupId warning group id
     * @param failureStrategy failure strategy
     * @param processInstancePriority process instance priority
     * @param workerGroup worker group
     * @return create result code
     */
    Map<String, Object> insertSchedule(User loginUser,
                                       long projectCode,
                                       long processDefineCode,
                                       String schedule,
                                       WarningType warningType,
                                       int warningGroupId,
                                       FailureStrategy failureStrategy,
                                       Priority processInstancePriority,
                                       String workerGroup);

    /**
     * Update an existing schedule.
     *
     * @param loginUser login user
     * @param projectCode project code
     * @param id scheduler id
     * @param scheduleExpression scheduler expression (JSON)
     * @param warningType warning type
     * @param warningGroupId warning group id
     * @param failureStrategy failure strategy
     * @param workerGroup worker group
     * @param processInstancePriority process instance priority
     * @return update result code
     */
    Map<String, Object> updateSchedule(User loginUser,
                                       long projectCode,
                                       Integer id,
                                       String scheduleExpression,
                                       WarningType warningType,
                                       int warningGroupId,
                                       FailureStrategy failureStrategy,
                                       Priority processInstancePriority,
                                       String workerGroup);

    /**
     * Set a schedule online or offline.
     *
     * @param loginUser login user
     * @param projectCode project code
     * @param id scheduler id
     * @param scheduleStatus target schedule status (ONLINE/OFFLINE)
     * @return publish result code
     */
    Map<String, Object> setScheduleState(User loginUser,
                                         long projectCode,
                                         Integer id,
                                         ReleaseState scheduleStatus);

    /**
     * Query a page of schedules for one process definition.
     *
     * @param loginUser login user
     * @param projectName project name
     * @param processDefineId process definition id
     * @param searchVal search value
     * @param pageNo page number
     * @param pageSize page size
     * @return schedule list page
     */
    Map<String, Object> querySchedule(User loginUser, String projectName, Integer processDefineId, String searchVal,
                                      Integer pageNo, Integer pageSize);

    /**
     * Query all schedules of a project.
     *
     * @param loginUser login user
     * @param projectName project name
     * @return schedule list
     */
    Map<String, Object> queryScheduleList(User loginUser, String projectName);

    /**
     * Delete a schedule (internal use, no permission check result map).
     *
     * @param projectId project id
     * @param scheduleId schedule id
     * @throws RuntimeException runtime exception
     */
    void deleteSchedule(int projectId, int scheduleId);

    /**
     * Delete a schedule by id (with permission check).
     *
     * @param loginUser login user
     * @param projectName project name
     * @param scheduleId schedule id
     * @return delete result code
     */
    Map<String, Object> deleteScheduleById(User loginUser, String projectName, Integer scheduleId);

    /**
     * Preview a schedule expression.
     *
     * @param loginUser login user
     * @param projectName project name
     * @param schedule schedule expression (JSON)
     * @return the next five fire times
     */
    Map<String, Object> previewSchedule(User loginUser, String projectName, String schedule);
}
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,510
[Feature][JsonSplit-api]schedule list-paging interface
from #5498 Change the request parameter processDefinitionId to processDefinitionCode, including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5510
https://github.com/apache/dolphinscheduler/pull/5771
e4f427a8d8bf99754698e054845291a5223c2ea6
72535a47e3dafc68c457996ea6e01b8da17685aa
"2021-05-18T13:57:34Z"
java
"2021-07-09T02:13:00Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service.impl; import org.apache.dolphinscheduler.api.dto.ScheduleParam; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ServiceException; import org.apache.dolphinscheduler.api.service.ExecutorService; import org.apache.dolphinscheduler.api.service.MonitorService; import org.apache.dolphinscheduler.api.service.ProjectService; import org.apache.dolphinscheduler.api.service.SchedulerService; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.model.Server; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import 
org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.quartz.ProcessScheduleJob; import org.apache.dolphinscheduler.service.quartz.QuartzExecutors; import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; import java.text.ParseException; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import org.quartz.CronExpression; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; /** * scheduler service impl */ @Service public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerService { private static final Logger logger = LoggerFactory.getLogger(SchedulerServiceImpl.class); @Autowired private ProjectService projectService; @Autowired private ExecutorService executorService; @Autowired private MonitorService monitorService; @Autowired private ProcessService processService; @Autowired private ScheduleMapper scheduleMapper; @Autowired private ProjectMapper projectMapper; @Autowired private ProcessDefinitionMapper processDefinitionMapper; /** * save schedule * * @param loginUser login user * @param projectCode project name * @param processDefineCode process definition code * @param schedule scheduler * @param warningType warning type * @param warningGroupId warning group id * @param 
failureStrategy failure strategy * @param processInstancePriority process instance priority * @param workerGroup worker group * @return create result code */ @Override @Transactional(rollbackFor = RuntimeException.class) public Map<String, Object> insertSchedule(User loginUser, long projectCode, long processDefineCode, String schedule, WarningType warningType, int warningGroupId, FailureStrategy failureStrategy, Priority processInstancePriority, String workerGroup) { Map<String, Object> result = new HashMap<>(); Project project = projectMapper.queryByCode(projectCode); // check project auth boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result); if (!hasProjectAndPerm) { return result; } // check work flow define release state ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefineCode); result = executorService.checkProcessDefinitionValid(processDefinition, processDefineCode); if (result.get(Constants.STATUS) != Status.SUCCESS) { return result; } Schedule scheduleObj = new Schedule(); Date now = new Date(); scheduleObj.setProjectName(project.getName()); scheduleObj.setProcessDefinitionId(processDefinition.getId()); scheduleObj.setProcessDefinitionName(processDefinition.getName()); ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class); if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) { logger.warn("The start time must not be the same as the end"); putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME); return result; } scheduleObj.setStartTime(scheduleParam.getStartTime()); scheduleObj.setEndTime(scheduleParam.getEndTime()); if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) { logger.error("{} verify failure", scheduleParam.getCrontab()); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleParam.getCrontab()); return result; } scheduleObj.setCrontab(scheduleParam.getCrontab()); 
scheduleObj.setTimezoneId(scheduleParam.getTimezoneId()); scheduleObj.setWarningType(warningType); scheduleObj.setWarningGroupId(warningGroupId); scheduleObj.setFailureStrategy(failureStrategy); scheduleObj.setCreateTime(now); scheduleObj.setUpdateTime(now); scheduleObj.setUserId(loginUser.getId()); scheduleObj.setUserName(loginUser.getUserName()); scheduleObj.setReleaseState(ReleaseState.OFFLINE); scheduleObj.setProcessInstancePriority(processInstancePriority); scheduleObj.setWorkerGroup(workerGroup); scheduleMapper.insert(scheduleObj); /** * updateProcessInstance receivers and cc by process definition id */ processDefinition.setWarningGroupId(warningGroupId); processDefinitionMapper.updateById(processDefinition); // return scheduler object with ID result.put(Constants.DATA_LIST, scheduleMapper.selectById(scheduleObj.getId())); putMsg(result, Status.SUCCESS); result.put("scheduleId", scheduleObj.getId()); return result; } /** * updateProcessInstance schedule * * @param loginUser login user * @param projectCode project code * @param id scheduler id * @param scheduleExpression scheduler * @param warningType warning type * @param warningGroupId warning group id * @param failureStrategy failure strategy * @param workerGroup worker group * @param processInstancePriority process instance priority * @param scheduleStatus schedule status * @return update result code */ @Override @Transactional(rollbackFor = RuntimeException.class) public Map<String, Object> updateSchedule(User loginUser, long projectCode, Integer id, String scheduleExpression, WarningType warningType, int warningGroupId, FailureStrategy failureStrategy, Priority processInstancePriority, String workerGroup) { Map<String, Object> result = new HashMap<>(); Project project = projectMapper.queryByCode(projectCode); // check project auth boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result); if (!hasProjectAndPerm) { return result; } // check schedule exists Schedule schedule 
= scheduleMapper.selectById(id); if (schedule == null) { putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id); return result; } ProcessDefinition processDefinition = processService.findProcessDefineById(schedule.getProcessDefinitionId()); if (processDefinition == null) { putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, schedule.getProcessDefinitionId()); return result; } /** * scheduling on-line status forbid modification */ if (checkValid(result, schedule.getReleaseState() == ReleaseState.ONLINE, Status.SCHEDULE_CRON_ONLINE_FORBID_UPDATE)) { return result; } Date now = new Date(); // updateProcessInstance param if (StringUtils.isNotEmpty(scheduleExpression)) { ScheduleParam scheduleParam = JSONUtils.parseObject(scheduleExpression, ScheduleParam.class); if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) { logger.warn("The start time must not be the same as the end"); putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME); return result; } schedule.setStartTime(scheduleParam.getStartTime()); schedule.setEndTime(scheduleParam.getEndTime()); if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) { putMsg(result, Status.SCHEDULE_CRON_CHECK_FAILED, scheduleParam.getCrontab()); return result; } schedule.setCrontab(scheduleParam.getCrontab()); schedule.setTimezoneId(scheduleParam.getTimezoneId()); } if (warningType != null) { schedule.setWarningType(warningType); } schedule.setWarningGroupId(warningGroupId); if (failureStrategy != null) { schedule.setFailureStrategy(failureStrategy); } schedule.setWorkerGroup(workerGroup); schedule.setUpdateTime(now); schedule.setProcessInstancePriority(processInstancePriority); scheduleMapper.updateById(schedule); /** * updateProcessInstance recipients and cc by process definition ID */ processDefinition.setWarningGroupId(warningGroupId); processDefinitionMapper.updateById(processDefinition); putMsg(result, Status.SUCCESS); return result; } /** * set schedule online or offline * 
* @param loginUser login user * @param projectCode project code * @param id scheduler id * @param scheduleStatus schedule status * @return publish result code */ @Override @Transactional(rollbackFor = RuntimeException.class) public Map<String, Object> setScheduleState(User loginUser, long projectCode, Integer id, ReleaseState scheduleStatus) { Map<String, Object> result = new HashMap<>(); Project project = projectMapper.queryByCode(projectCode); // check project auth boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result); if (!hasProjectAndPerm) { return result; } // check schedule exists Schedule scheduleObj = scheduleMapper.selectById(id); if (scheduleObj == null) { putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id); return result; } // check schedule release state if (scheduleObj.getReleaseState() == scheduleStatus) { logger.info("schedule release is already {},needn't to change schedule id: {} from {} to {}", scheduleObj.getReleaseState(), scheduleObj.getId(), scheduleObj.getReleaseState(), scheduleStatus); putMsg(result, Status.SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE, scheduleStatus); return result; } ProcessDefinition processDefinition = processService.findProcessDefineById(scheduleObj.getProcessDefinitionId()); if (processDefinition == null) { putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, scheduleObj.getProcessDefinitionId()); return result; } if (scheduleStatus == ReleaseState.ONLINE) { // check process definition release state if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { logger.info("not release process definition id: {} , name : {}", processDefinition.getId(), processDefinition.getName()); putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName()); return result; } // check sub process definition release state List<Integer> subProcessDefineIds = new ArrayList<>(); processService.recurseFindSubProcessId(scheduleObj.getProcessDefinitionId(), subProcessDefineIds); Integer[] idArray 
= subProcessDefineIds.toArray(new Integer[subProcessDefineIds.size()]); if (!subProcessDefineIds.isEmpty()) { List<ProcessDefinition> subProcessDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(idArray); if (subProcessDefinitionList != null && !subProcessDefinitionList.isEmpty()) { for (ProcessDefinition subProcessDefinition : subProcessDefinitionList) { /** * if there is no online process, exit directly */ if (subProcessDefinition.getReleaseState() != ReleaseState.ONLINE) { logger.info("not release process definition id: {} , name : {}", subProcessDefinition.getId(), subProcessDefinition.getName()); putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, subProcessDefinition.getId()); return result; } } } } } // check master server exists List<Server> masterServers = monitorService.getServerListFromRegistry(true); if (masterServers.isEmpty()) { putMsg(result, Status.MASTER_NOT_EXISTS); return result; } // set status scheduleObj.setReleaseState(scheduleStatus); scheduleMapper.updateById(scheduleObj); try { switch (scheduleStatus) { case ONLINE: logger.info("Call master client set schedule online, project id: {}, flow id: {},host: {}", project.getId(), processDefinition.getId(), masterServers); setSchedule(project.getId(), scheduleObj); break; case OFFLINE: logger.info("Call master client set schedule offline, project id: {}, flow id: {},host: {}", project.getId(), processDefinition.getId(), masterServers); deleteSchedule(project.getId(), id); break; default: putMsg(result, Status.SCHEDULE_STATUS_UNKNOWN, scheduleStatus.toString()); return result; } } catch (Exception e) { result.put(Constants.MSG, scheduleStatus == ReleaseState.ONLINE ? 
"set online failure" : "set offline failure"); throw new ServiceException(result.get(Constants.MSG).toString()); } putMsg(result, Status.SUCCESS); return result; } /** * query schedule * * @param loginUser login user * @param projectName project name * @param processDefineId process definition id * @param pageNo page number * @param pageSize page size * @param searchVal search value * @return schedule list page */ @Override public Map<String, Object> querySchedule(User loginUser, String projectName, Integer processDefineId, String searchVal, Integer pageNo, Integer pageSize) { HashMap<String, Object> result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); // check project auth boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result); if (!hasProjectAndPerm) { return result; } ProcessDefinition processDefinition = processService.findProcessDefineById(processDefineId); if (processDefinition == null) { putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineId); return result; } Page<Schedule> page = new Page<>(pageNo, pageSize); IPage<Schedule> scheduleIPage = scheduleMapper.queryByProcessDefineIdPaging( page, processDefineId, searchVal ); PageInfo<Schedule> pageInfo = new PageInfo<>(pageNo, pageSize); pageInfo.setTotalCount((int) scheduleIPage.getTotal()); pageInfo.setLists(scheduleIPage.getRecords()); result.put(Constants.DATA_LIST, pageInfo); putMsg(result, Status.SUCCESS); return result; } /** * query schedule list * * @param loginUser login user * @param projectName project name * @return schedule list */ @Override public Map<String, Object> queryScheduleList(User loginUser, String projectName) { Map<String, Object> result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); // check project auth boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result); if (!hasProjectAndPerm) { return result; } List<Schedule> schedules = 
scheduleMapper.querySchedulerListByProjectName(projectName); result.put(Constants.DATA_LIST, schedules); putMsg(result, Status.SUCCESS); return result; } public void setSchedule(int projectId, Schedule schedule) { logger.info("set schedule, project id: {}, scheduleId: {}", projectId, schedule.getId()); QuartzExecutors.getInstance().addJob(ProcessScheduleJob.class, projectId, schedule); } /** * delete schedule * * @param projectId project id * @param scheduleId schedule id * @throws RuntimeException runtime exception */ @Override public void deleteSchedule(int projectId, int scheduleId) { logger.info("delete schedules of project id:{}, schedule id:{}", projectId, scheduleId); String jobName = QuartzExecutors.buildJobName(scheduleId); String jobGroupName = QuartzExecutors.buildJobGroupName(projectId); if (!QuartzExecutors.getInstance().deleteJob(jobName, jobGroupName)) { logger.warn("set offline failure:projectId:{},scheduleId:{}", projectId, scheduleId); throw new ServiceException("set offline failure"); } } /** * check valid * * @param result result * @param bool bool * @param status status * @return check result code */ private boolean checkValid(Map<String, Object> result, boolean bool, Status status) { // timeout is valid if (bool) { putMsg(result, status); return true; } return false; } /** * delete schedule by id * * @param loginUser login user * @param projectName project name * @param scheduleId scheule id * @return delete result code */ @Override public Map<String, Object> deleteScheduleById(User loginUser, String projectName, Integer scheduleId) { Map<String, Object> result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); Status resultEnum = (Status) checkResult.get(Constants.STATUS); if (resultEnum != Status.SUCCESS) { return checkResult; } Schedule schedule = scheduleMapper.selectById(scheduleId); if (schedule == null) { 
putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, scheduleId); return result; } // Determine if the login user is the owner of the schedule if (loginUser.getId() != schedule.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER) { putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } // check schedule is already online if (schedule.getReleaseState() == ReleaseState.ONLINE) { putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, schedule.getId()); return result; } int delete = scheduleMapper.deleteById(scheduleId); if (delete > 0) { putMsg(result, Status.SUCCESS); } else { putMsg(result, Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR); } return result; } /** * preview schedule * * @param loginUser login user * @param projectName project name * @param schedule schedule expression * @return the next five fire time */ @Override public Map<String, Object> previewSchedule(User loginUser, String projectName, String schedule) { Map<String, Object> result = new HashMap<>(); CronExpression cronExpression; ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class); Date now = new Date(); Date startTime = now.after(scheduleParam.getStartTime()) ? now : scheduleParam.getStartTime(); Date endTime = scheduleParam.getEndTime(); try { cronExpression = CronUtils.parse2CronExpression(scheduleParam.getCrontab()); } catch (ParseException e) { logger.error(e.getMessage(), e); putMsg(result, Status.PARSE_TO_CRON_EXPRESSION_ERROR); return result; } List<Date> selfFireDateList = CronUtils.getSelfFireDateList(startTime, endTime, cronExpression, Constants.PREVIEW_SCHEDULE_EXECUTE_COUNT); result.put(Constants.DATA_LIST, selfFireDateList.stream().map(DateUtils::dateToString)); putMsg(result, Status.SUCCESS); return result; } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,510
[Feature][JsonSplit-api]schedule list-paging interface
from #5498 Change the request parameter processDefinitionId to processDefinitionCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5510
https://github.com/apache/dolphinscheduler/pull/5771
e4f427a8d8bf99754698e054845291a5223c2ea6
72535a47e3dafc68c457996ea6e01b8da17685aa
"2021-05-18T13:57:34Z"
java
"2021-07-09T02:13:00Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AbstractControllerTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.controller; import static org.mockito.Mockito.doNothing; import java.text.MessageFormat; import java.util.HashMap; import java.util.Map; import org.apache.dolphinscheduler.api.ApiApplicationServer; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.SessionService; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.service.registry.RegistryClient; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.mock.mockito.MockBean; import org.springframework.test.context.junit4.SpringRunner; import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.setup.MockMvcBuilders; import org.springframework.web.context.WebApplicationContext; /** * abstract controller test */ 
@RunWith(SpringRunner.class) @SpringBootTest(classes = ApiApplicationServer.class) public class AbstractControllerTest { public static final String SESSION_ID = "sessionId"; protected MockMvc mockMvc; @Autowired private WebApplicationContext webApplicationContext; @Autowired private SessionService sessionService; protected User user; protected String sessionId; @MockBean RegistryClient registryClient; @Before public void setUp() { doNothing().when(registryClient).init(); mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build(); createSession(); } @After public void after() throws Exception { sessionService.signOut("127.0.0.1", user); } private void createSession() { User loginUser = new User(); loginUser.setId(1); loginUser.setUserType(UserType.GENERAL_USER); user = loginUser; String session = sessionService.createSession(loginUser, "127.0.0.1"); sessionId = session; Assert.assertTrue(StringUtils.isNotEmpty(session)); } public Map<String, Object> successResult() { Map<String, Object> serviceResult = new HashMap<>(); putMsg(serviceResult, Status.SUCCESS); serviceResult.put(Constants.DATA_LIST, "{}"); return serviceResult; } public void putMsg(Map<String, Object> result, Status status, Object... statusParams) { result.put(Constants.STATUS, status); if (statusParams != null && statusParams.length > 0) { result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams)); } else { result.put(Constants.MSG, status.getMsg()); } } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,510
[Feature][JsonSplit-api]schedule list-paging interface
from #5498 Change the request parameter processDefinitionId to processDefinitionCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5510
https://github.com/apache/dolphinscheduler/pull/5771
e4f427a8d8bf99754698e054845291a5223c2ea6
72535a47e3dafc68c457996ea6e01b8da17685aa
"2021-05-18T13:57:34Z"
java
"2021-07-09T02:13:00Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.controller; import static org.mockito.ArgumentMatchers.isA; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.SchedulerService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.User; import org.junit.Assert; import org.junit.Test; import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.boot.test.mock.mockito.MockBean; import org.springframework.http.MediaType; 
import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; /** * scheduler controller test */ public class SchedulerControllerTest extends AbstractControllerTest { private static Logger logger = LoggerFactory.getLogger(SchedulerControllerTest.class); @MockBean private SchedulerService schedulerService; @Test public void testCreateSchedule() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("processDefinitionCode","40"); paramsMap.add("schedule","{'startTime':'2019-12-16 00:00:00','endTime':'2019-12-17 00:00:00','crontab':'0 0 6 * * ? *'}"); paramsMap.add("warningType",String.valueOf(WarningType.NONE)); paramsMap.add("warningGroupId","1"); paramsMap.add("failureStrategy",String.valueOf(FailureStrategy.CONTINUE)); paramsMap.add("receivers",""); paramsMap.add("receiversCc",""); paramsMap.add("workerGroupId","1"); paramsMap.add("processInstancePriority",String.valueOf(Priority.HIGH)); Mockito.when(schedulerService.insertSchedule(isA(User.class), isA(Long.class), isA(Long.class), isA(String.class), isA(WarningType.class), isA(int.class), isA(FailureStrategy.class), isA(Priority.class), isA(String.class))).thenReturn(successResult()); MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/schedule/create",123) .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isCreated()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testUpdateSchedule() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("id","37"); paramsMap.add("schedule","{'startTime':'2019-12-16 
00:00:00','endTime':'2019-12-17 00:00:00','crontab':'0 0 7 * * ? *'}"); paramsMap.add("warningType",String.valueOf(WarningType.NONE)); paramsMap.add("warningGroupId","1"); paramsMap.add("failureStrategy",String.valueOf(FailureStrategy.CONTINUE)); paramsMap.add("receivers",""); paramsMap.add("receiversCc",""); paramsMap.add("workerGroupId","1"); paramsMap.add("processInstancePriority",String.valueOf(Priority.HIGH)); Mockito.when(schedulerService.updateSchedule(isA(User.class), isA(Long.class), isA(Integer.class), isA(String.class), isA(WarningType.class), isA(Integer.class), isA(FailureStrategy.class), isA(Priority.class), isA(String.class))).thenReturn(successResult()); MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/schedule/update",123) .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testOnline() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("id","37"); Mockito.when(schedulerService.setScheduleState(isA(User.class), isA(Long.class), isA(Integer.class), isA(ReleaseState.class))).thenReturn(successResult()); MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/schedule/online",123) .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testOffline() throws 
Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("id","28"); Mockito.when(schedulerService.setScheduleState(isA(User.class), isA(Long.class), isA(Integer.class), isA(ReleaseState.class))).thenReturn(successResult()); MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/schedule/offline",123) .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testQueryScheduleListPaging() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("processDefinitionId","40"); paramsMap.add("searchVal","test"); paramsMap.add("pageNo","1"); paramsMap.add("pageSize","30"); MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/schedule/list-paging","cxc_1113") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testQueryScheduleList() throws Exception { MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/schedule/list","cxc_1113") .header(SESSION_ID, sessionId)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); 
logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testPreviewSchedule() throws Exception { MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/schedule/preview","cxc_1113") .header(SESSION_ID, sessionId) .param("schedule","{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}")) .andExpect(status().isCreated()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testDeleteScheduleById() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("scheduleId","37"); MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/schedule/delete","cxc_1113") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,510
[Feature][JsonSplit-api]schedule list-paging interface
from #5498 Change the request parameter processDefinitionId to processDefinitionCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5510
https://github.com/apache/dolphinscheduler/pull/5771
e4f427a8d8bf99754698e054845291a5223c2ea6
72535a47e3dafc68c457996ea6e01b8da17685aa
"2021-05-18T13:57:34Z"
java
"2021-07-09T02:13:00Z"
dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <div class="list-model" style="position: relative;"> <div class="table-box"> <el-table :data="list" size="mini" style="width: 100%" @selection-change="_arrDelChange"> <el-table-column type="selection" width="50" :selectable="selectable"></el-table-column> <el-table-column prop="id" :label="$t('#')" width="50"></el-table-column> <el-table-column :label="$t('Process Name')" min-width="200"> <template slot-scope="scope"> <el-popover trigger="hover" placement="top"> <p>{{ scope.row.name }}</p> <div slot="reference" class="name-wrapper"> <router-link :to="{ path: `/projects/${projectId}/definition/list/${scope.row.id}` }" tag="a" class="links"> <span class="ellipsis">{{scope.row.name}}</span> </router-link> </div> </el-popover> </template> </el-table-column> <el-table-column :label="$t('State')"> <template slot-scope="scope"> {{_rtPublishStatus(scope.row.releaseState)}} </template> </el-table-column> <el-table-column :label="$t('Create Time')" width="135"> <template slot-scope="scope"> <span>{{scope.row.createTime | formatDate}}</span> </template> </el-table-column> <el-table-column :label="$t('Update Time')" width="135"> <template slot-scope="scope"> <span>{{scope.row.updateTime | 
formatDate}}</span> </template> </el-table-column> <el-table-column :label="$t('Description')"> <template slot-scope="scope"> <span>{{scope.row.description | filterNull}}</span> </template> </el-table-column> <el-table-column prop="modifyBy" :label="$t('Modify User')"></el-table-column> <el-table-column :label="$t('Timing state')"> <template slot-scope="scope"> <span v-if="scope.row.scheduleReleaseState === 'OFFLINE'" class="time_offline">{{$t('offline')}}</span> <span v-if="scope.row.scheduleReleaseState === 'ONLINE'" class="time_online">{{$t('online')}}</span> <span v-if="!scope.row.scheduleReleaseState">-</span> </template> </el-table-column> <el-table-column :label="$t('Operation')" width="335" fixed="right"> <template slot-scope="scope"> <el-tooltip :content="$t('Edit')" placement="top" :enterable="false"> <span><el-button type="primary" size="mini" icon="el-icon-edit-outline" :disabled="scope.row.releaseState === 'ONLINE'" @click="_edit(scope.row)" circle></el-button></span> </el-tooltip> <el-tooltip :content="$t('Start')" placement="top" :enterable="false"> <span><el-button type="success" size="mini" :disabled="scope.row.releaseState !== 'ONLINE'" icon="el-icon-video-play" @click="_start(scope.row)" circle></el-button></span> </el-tooltip> <el-tooltip :content="$t('Timing')" placement="top" :enterable="false"> <span><el-button type="primary" size="mini" icon="el-icon-time" :disabled="scope.row.releaseState !== 'ONLINE' || scope.row.scheduleReleaseState !== null" @click="_timing(scope.row)" circle></el-button></span> </el-tooltip> <el-tooltip :content="$t('online')" placement="top" :enterable="false"> <span><el-button type="warning" size="mini" v-if="scope.row.releaseState === 'OFFLINE'" icon="el-icon-upload2" @click="_poponline(scope.row)" circle></el-button></span> </el-tooltip> <el-tooltip :content="$t('offline')" placement="top" :enterable="false"> <span><el-button type="danger" size="mini" icon="el-icon-download" v-if="scope.row.releaseState === 
'ONLINE'" @click="_downline(scope.row)" circle></el-button></span> </el-tooltip> <el-tooltip :content="$t('Copy Workflow')" placement="top" :enterable="false"> <span><el-button type="primary" size="mini" :disabled="scope.row.releaseState === 'ONLINE'" icon="el-icon-document-copy" @click="_copyProcess(scope.row)" circle></el-button></span> </el-tooltip> <el-tooltip :content="$t('Cron Manage')" placement="top" :enterable="false"> <span><el-button type="primary" size="mini" icon="el-icon-date" :disabled="scope.row.releaseState !== 'ONLINE'" @click="_timingManage(scope.row)" circle></el-button></span> </el-tooltip> <el-tooltip :content="$t('Delete')" placement="top" :enterable="false"> <el-popconfirm :confirmButtonText="$t('Confirm')" :cancelButtonText="$t('Cancel')" icon="el-icon-info" iconColor="red" :title="$t('Delete?')" @onConfirm="_delete(scope.row,scope.row.id)" > <el-button type="danger" size="mini" icon="el-icon-delete" :disabled="scope.row.releaseState === 'ONLINE'" circle slot="reference"></el-button> </el-popconfirm> </el-tooltip> <el-tooltip :content="$t('TreeView')" placement="top" :enterable="false"> <span><el-button type="primary" size="mini" icon="el-icon-s-data" @click="_treeView(scope.row)" circle></el-button></span> </el-tooltip> <el-tooltip :content="$t('Export')" placement="top" :enterable="false"> <span><el-button type="primary" size="mini" icon="el-icon-s-unfold" @click="_export(scope.row)" circle></el-button></span> </el-tooltip> <el-tooltip :content="$t('Version Info')" placement="top" :enterable="false"> <span><el-button type="primary" size="mini" icon="el-icon-info" @click="_version(scope.row)" circle></el-button></span> </el-tooltip> </template> </el-table-column> </el-table> </div> <el-tooltip :content="$t('Delete')" placement="top"> <el-popconfirm :confirmButtonText="$t('Confirm')" :cancelButtonText="$t('Cancel')" :title="$t('Delete?')" @onConfirm="_delete({},-1)" > <el-button style="position: absolute; bottom: -48px; left: 19px;" 
type="primary" size="mini" :disabled="!strSelectIds" slot="reference">{{$t('Delete')}}</el-button> </el-popconfirm> </el-tooltip> <el-button type="primary" size="mini" :disabled="!strSelectIds" style="position: absolute; bottom: -48px; left: 80px;" @click="_batchExport(item)" >{{$t('Export')}}</el-button> <span><el-button type="primary" size="mini" :disabled="!strSelectIds" style="position: absolute; bottom: -48px; left: 140px;" @click="_batchCopy(item)" >{{$t('Batch copy')}}</el-button></span> <el-button type="primary" size="mini" :disabled="!strSelectIds" style="position: absolute; bottom: -48px; left: 225px;" @click="_batchMove(item)" >{{$t('Batch move')}}</el-button> <el-drawer :visible.sync="drawer" size="" :with-header="false"> <m-versions :versionData = versionData @mVersionSwitchProcessDefinitionVersion="mVersionSwitchProcessDefinitionVersion" @mVersionGetProcessDefinitionVersionsPage="mVersionGetProcessDefinitionVersionsPage" @mVersionDeleteProcessDefinitionVersion="mVersionDeleteProcessDefinitionVersion" @closeVersion="closeVersion"></m-versions> </el-drawer> <el-dialog :title="$t('Please set the parameters before starting')" v-if="startDialog" :visible.sync="startDialog" width="auto"> <m-start :startData= "startData" @onUpdateStart="onUpdateStart" @closeStart="closeStart"></m-start> </el-dialog> <el-dialog :title="$t('Set parameters before timing')" :visible.sync="timingDialog" width="auto"> <m-timing :timingData="timingData" @onUpdateTiming="onUpdateTiming" @closeTiming="closeTiming"></m-timing> </el-dialog> <el-dialog :title="$t('Info')" :visible.sync="relatedItemsDialog" width="auto"> <m-related-items :tmp="tmp" @onBatchCopy="onBatchCopy" @onBatchMove="onBatchMove" @closeRelatedItems="closeRelatedItems"></m-related-items> </el-dialog> </div> </template> <script> import _ from 'lodash' import mStart from './start' import mTiming from './timing' import mRelatedItems from './relatedItems' import { mapActions, mapState } from 'vuex' import { publishStatus 
} from '@/conf/home/pages/dag/_source/config' import mVersions from './versions' export default { name: 'definition-list', data () { return { list: [], strSelectIds: '', checkAll: false, drawer: false, versionData: { processDefinition: {}, processDefinitionVersions: [], total: null, pageNo: null, pageSize: null }, startDialog: false, startData: {}, timingDialog: false, timingData: { item: {}, type: '' }, relatedItemsDialog: false, tmp: false } }, props: { processList: Array, pageNo: Number, pageSize: Number }, methods: { ...mapActions('dag', ['editProcessState', 'getStartCheck', 'deleteDefinition', 'batchDeleteDefinition', 'exportDefinition', 'getProcessDefinitionVersionsPage', 'copyProcess', 'switchProcessDefinitionVersion', 'deleteProcessDefinitionVersion', 'moveProcess']), ...mapActions('security', ['getWorkerGroupsAll']), selectable (row, index) { if (row.releaseState === 'ONLINE') { return false } else { return true } }, _rtPublishStatus (code) { return _.filter(publishStatus, v => v.code === code)[0].desc }, _treeView (item) { this.$router.push({ path: `/projects/${this.projectId}/definition/tree/${item.id}` }) }, /** * Start */ _start (item) { this.getWorkerGroupsAll() this.getStartCheck({ processDefinitionId: item.id }).then(res => { this.startData = item this.startDialog = true }).catch(e => { this.$message.error(e.msg || '') }) }, onUpdateStart () { this._onUpdate() this.startDialog = false }, closeStart () { this.startDialog = false }, /** * timing */ _timing (item) { this.timingData.item = item this.timingData.type = 'timing' this.timingDialog = true }, onUpdateTiming () { this._onUpdate() this.timingDialog = false }, closeTiming () { this.timingDialog = false }, /** * Timing manage */ _timingManage (item) { this.$router.push({ path: `/projects/${this.projectId}/definition/list/timing/${item.id}` }) }, /** * delete */ _delete (item, i) { // remove tow++ if (i < 0) { this._batchDelete() return } // remove one this.deleteDefinition({ processDefinitionId: 
item.id }).then(res => { this._onUpdate() this.$message.success(res.msg) }).catch(e => { this.$message.error(e.msg || '') }) }, /** * edit */ _edit (item) { this.$router.push({ path: `/projects/${this.projectId}/definition/list/${item.id}` }) }, /** * Offline */ _downline (item) { this._upProcessState({ processId: item.id, releaseState: 'OFFLINE' }) }, /** * online */ _poponline (item) { this._upProcessState({ processId: item.id, releaseState: 'ONLINE' }) }, /** * copy */ _copyProcess (item) { this.copyProcess({ processDefinitionIds: item.id, targetProjectId: item.projectId }).then(res => { this.strSelectIds = '' this.$message.success(res.msg) // $('body').find('.tooltip.fade.top.in').remove() this._onUpdate() }).catch(e => { this.$message.error(e.msg || '') }) }, /** * move */ _moveProcess (item) { this.moveProcess({ processDefinitionIds: item.id, targetProjectId: item.projectId }).then(res => { this.strSelectIds = '' this.$message.success(res.msg) $('body').find('.tooltip.fade.top.in').remove() this._onUpdate() }).catch(e => { this.$message.error(e.msg || '') }) }, _export (item) { this.exportDefinition({ processDefinitionIds: item.id, fileName: item.name }).catch(e => { this.$message.error(e.msg || '') }) }, /** * switch version in process definition version list * * @param version the version user want to change * @param processDefinitionId the process definition id * @param fromThis fromThis */ mVersionSwitchProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) { this.switchProcessDefinitionVersion({ version: version, processDefinitionId: processDefinitionId }).then(res => { this.$message.success($t('Switch Version Successfully')) this.$router.push({ path: `/projects/${this.projectId}/definition/list/${processDefinitionId}` }) }).catch(e => { this.$message.error(e.msg || '') }) }, /** * Paging event of process definition versions * * @param pageNo page number * @param pageSize page size * @param processDefinitionId the process definition id of 
page version * @param fromThis fromThis */ mVersionGetProcessDefinitionVersionsPage ({ pageNo, pageSize, processDefinitionCode, fromThis }) { this.getProcessDefinitionVersionsPage({ pageNo: pageNo, pageSize: pageSize, processDefinitionCode: processDefinitionCode }).then(res => { this.versionData.processDefinitionVersions = res.data.lists this.versionData.total = res.data.totalCount this.versionData.pageSize = res.data.pageSize this.versionData.pageNo = res.data.currentPage }).catch(e => { this.$message.error(e.msg || '') }) }, /** * delete one version of process definition * * @param version the version need to delete * @param processDefinitionId the process definition id user want to delete * @param fromThis fromThis */ mVersionDeleteProcessDefinitionVersion ({ version, processDefinitionId, processDefinitionCode, fromThis }) { this.deleteProcessDefinitionVersion({ version: version, processDefinitionId: processDefinitionId }).then(res => { this.$message.success(res.msg || '') this.mVersionGetProcessDefinitionVersionsPage({ pageNo: 1, pageSize: 10, processDefinitionCode: processDefinitionCode, fromThis: fromThis }) }).catch(e => { this.$message.error(e.msg || '') }) }, _version (item) { this.getProcessDefinitionVersionsPage({ pageNo: 1, pageSize: 10, processDefinitionCode: item.code }).then(res => { let processDefinitionVersions = res.data.lists let total = res.data.totalCount let pageSize = res.data.pageSize let pageNo = res.data.currentPage this.versionData.processDefinition = item this.versionData.processDefinitionVersions = processDefinitionVersions this.versionData.total = total this.versionData.pageNo = pageNo this.versionData.pageSize = pageSize this.drawer = true }).catch(e => { this.$message.error(e.msg || '') }) }, closeVersion () { this.drawer = false }, _batchExport () { this.exportDefinition({ processDefinitionIds: this.strSelectIds, fileName: 'process_' + new Date().getTime() }).then(res => { this._onUpdate() this.checkAll = false this.strSelectIds = 
'' }).catch(e => { this.strSelectIds = '' this.checkAll = false this.$message.error(e.msg) }) }, /** * Batch Copy */ _batchCopy () { this.relatedItemsDialog = true this.tmp = false }, onBatchCopy (item) { this._copyProcess({ id: this.strSelectIds, projectId: item }) this.relatedItemsDialog = false }, closeRelatedItems () { this.relatedItemsDialog = false }, /** * _batchMove */ _batchMove () { this.tmp = true this.relatedItemsDialog = true }, onBatchMove (item) { this._moveProcess({ id: this.strSelectIds, projectId: item }) this.relatedItemsDialog = false }, /** * Edit state */ _upProcessState (o) { this.editProcessState(o).then(res => { this.$message.success(res.msg) $('body').find('.tooltip.fade.top.in').remove() this._onUpdate() }).catch(e => { this.$message.error(e.msg || '') }) }, _onUpdate () { this.$emit('on-update') }, /** * the array that to be delete */ _arrDelChange (v) { let arr = [] arr = _.map(v, 'id') this.strSelectIds = _.join(arr, ',') }, /** * batch delete */ _batchDelete () { this.batchDeleteDefinition({ processDefinitionIds: this.strSelectIds }).then(res => { this._onUpdate() this.checkAll = false this.strSelectIds = '' this.$message.success(res.msg) }).catch(e => { this.strSelectIds = '' this.checkAll = false this.$message.error(e.msg || '') }) } }, watch: { processList: { handler (a) { this.checkAll = false this.list = [] setTimeout(() => { this.list = _.cloneDeep(a) }) }, immediate: true, deep: true }, pageNo () { this.strSelectIds = '' } }, created () { }, mounted () { }, computed: { ...mapState('dag', ['projectId']) }, components: { mVersions, mStart, mTiming, mRelatedItems } } </script> <style lang="scss" rel="stylesheet/scss"> .time_online { background-color: #5cb85c; color: #fff; padding: 3px; } .time_offline { background-color: #ffc107; color: #fff; padding: 3px; } </style>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,510
[Feature][JsonSplit-api]schedule list-paging interface
from #5498 Change the request parameter processDefinitionId to processDefinitionCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5510
https://github.com/apache/dolphinscheduler/pull/5771
e4f427a8d8bf99754698e054845291a5223c2ea6
72535a47e3dafc68c457996ea6e01b8da17685aa
"2021-05-18T13:57:34Z"
java
"2021-07-09T02:13:00Z"
dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/timing/_source/list.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <div> <div class="conditions-box"> <!--<m-conditions @on-conditions="_onConditions"></m-conditions>--> </div> <div class="list-model" v-if="!isLoading"> <template v-if="list.length"> <div class="table-box"> <el-table :data="list" size="mini" style="width: 100%"> <el-table-column type="index" :label="$t('#')" width="50"></el-table-column> <el-table-column prop="processDefinitionName" :label="$t('Process Name')"></el-table-column> <el-table-column :label="$t('Start Time')" min-width="120"> <template slot-scope="scope"> <span>{{scope.row.startTime | formatDate}}</span> </template> </el-table-column> <el-table-column :label="$t('End Time')" min-width="120"> <template slot-scope="scope"> <span>{{scope.row.endTime | formatDate}}</span> </template> </el-table-column> <el-table-column prop="crontab" :label="$t('crontab')"></el-table-column> <el-table-column prop="failureStrategy" :label="$t('Failure Strategy')"></el-table-column> <el-table-column :label="$t('State')"> <template slot-scope="scope"> <span>{{_rtReleaseState(scope.row.releaseState)}}</span> </template> </el-table-column> <el-table-column :label="$t('Create Time')" min-width="120"> <template slot-scope="scope"> 
<span>{{scope.row.createTime | formatDate}}</span> </template> </el-table-column> <el-table-column :label="$t('Update Time')" min-width="120"> <template slot-scope="scope"> <span>{{scope.row.updateTime | formatDate}}</span> </template> </el-table-column> <el-table-column :label="$t('Operation')" width="120"> <template slot-scope="scope"> <el-tooltip :content="$t('Edit')" placement="top"> <span><el-button type="primary" size="mini" icon="el-icon-edit-outline" :disabled="scope.row.releaseState === 'ONLINE'" @click="_editTiming(scope.row)" circle></el-button></span> </el-tooltip> <el-tooltip :content="$t('online')" placement="top" v-if="scope.row.releaseState === 'OFFLINE'"> <span><el-button type="warning" size="mini" icon="el-icon-upload2" @click="_online(scope.row)" circle></el-button></span> </el-tooltip> <el-tooltip :content="$t('offline')" placement="top" v-if="scope.row.releaseState === 'ONLINE'"> <span><el-button type="danger" size="mini" icon="el-icon-download" @click="_offline(scope.row)" circle></el-button></span> </el-tooltip> <el-tooltip :content="$t('Delete')" placement="top"> <el-popconfirm :confirmButtonText="$t('Confirm')" :cancelButtonText="$t('Cancel')" icon="el-icon-info" iconColor="red" :title="$t('Delete?')" @onConfirm="_delete(scope.row,scope.row.id)" > <el-button type="danger" size="mini" icon="el-icon-delete" circle slot="reference"></el-button> </el-popconfirm> </el-tooltip> </template> </el-table-column> </el-table> </div> <div class="page-box"> <el-pagination background @current-change="_page" @size-change="_pageSize" :page-size="pageSize" :current-page.sync="pageNo" :page-sizes="[10, 30, 50]" layout="sizes, prev, pager, next, jumper" :total="total"> </el-pagination> </div> </template> <template v-if="!list.length"> <m-no-data></m-no-data> </template> </div> <m-spin :is-spin="isLoading"></m-spin> <el-dialog :title="$t('Set parameters before timing')" :visible.sync="timingDialog" width="auto"> <m-timing :timingData="timingData" 
@onUpdateTiming="onUpdateTiming" @closeTiming="closeTiming"></m-timing> </el-dialog> </div> </template> <script> import _ from 'lodash' import { mapActions } from 'vuex' import mSpin from '@/module/components/spin/spin' import mTiming from '../../pages/list/_source/timing' import mNoData from '@/module/components/noData/noData' import { publishStatus } from '@/conf/home/pages/dag/_source/config' export default { name: 'list', data () { return { isLoading: false, total: null, pageNo: 1, pageSize: 10, list: [], timingDialog: false, timingData: { item: {} } } }, props: { }, methods: { ...mapActions('dag', ['getScheduleList', 'scheduleOffline', 'scheduleOnline', 'deleteTiming']), /** * delete */ _delete (item, i) { this.deleteTiming({ scheduleId: item.id }).then(res => { this.pageNo = 1 this._getScheduleList('false') this.$message.success(res.msg) }).catch(e => { this.$message.error(e.msg || '') }) }, /** * Close the delete layer */ _closeDelete (i) { this.$refs[`poptip-delete-${i}`][0].doClose() }, /** * return state */ _rtReleaseState (code) { return _.filter(publishStatus, v => v.code === code)[0].desc }, /** * page */ _page (val) { this.pageNo = val this._getScheduleList() }, _pageSize (val) { this.pageSize = val this._getScheduleList() }, /** * Inquire list */ _getScheduleList (flag) { this.isLoading = !flag this.getScheduleList({ processDefinitionId: this.$route.params.id, searchVal: '', pageNo: this.pageNo, pageSize: this.pageSize }).then(res => { this.list = [] setTimeout(() => { this.list = res.data.totalList }) this.total = res.data.total this.isLoading = false }).catch(e => { this.isLoading = false }) }, /** * search */ _onConditions (o) { this.searchVal = o.searchVal this.pageNo = 1 this._getScheduleList('false') }, /** * online */ _online (item) { this.pageNo = 1 this.scheduleOnline({ id: item.id }).then(res => { this.$message.success(res.msg) this._getScheduleList('false') }).catch(e => { this.$message.error(e.msg || '') }) }, /** * offline */ _offline 
(item) { this.pageNo = 1 this.scheduleOffline({ id: item.id }).then(res => { this.$message.success(res.msg) this._getScheduleList('false') }).catch(e => { this.$message.error(e.msg || '') }) }, /** * timing */ _editTiming (item) { this.timingData.item = item this.timingDialog = true }, onUpdateTiming () { this.pageNo = 1 this._getScheduleList('false') this.timingDialog = false }, closeTiming () { this.timingDialog = false } }, watch: {}, created () { this._getScheduleList() }, mounted () {}, components: { mSpin, mNoData, mTiming } } </script> <style lang="scss" rel="stylesheet/scss"> </style>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,510
[Feature][JsonSplit-api]schedule list-paging interface
from #5498 Change the request parameter processDefinitionId to processDefinitionCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5510
https://github.com/apache/dolphinscheduler/pull/5771
e4f427a8d8bf99754698e054845291a5223c2ea6
72535a47e3dafc68c457996ea6e01b8da17685aa
"2021-05-18T13:57:34Z"
java
"2021-07-09T02:13:00Z"
dolphinscheduler-ui/src/js/conf/home/router/index.js
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import Vue from 'vue' import store from '@/conf/home/store' import localStore from '@/module/util/localStorage' import i18n from '@/module/i18n/index.js' import config from '~/external/config' import Router from 'vue-router' Vue.use(Router) const router = new Router({ routes: [ { path: '/', name: 'index', redirect: { name: 'home' } }, { path: '/home', name: 'home', component: resolve => require(['../pages/home/index'], resolve), meta: { title: `${i18n.$t('Home')} - DolphinScheduler`, refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/projects', name: 'projects', component: resolve => require(['../pages/projects/index'], resolve), meta: { title: `${i18n.$t('Project')}` }, redirect: { name: 'projects-list' }, beforeEnter: (to, from, next) => { const blacklist = ['projects', 'projects-list'] if (!blacklist.includes(to.name) && to.params.projectId && to.params.projectId !== localStore.getItem('projectId')) { store.dispatch('projects/getProjectById', { projectId: to.params.projectId }).then(res => { store.commit('dag/setProjectId', res.id) store.commit('dag/setProjectCode', res.code) store.commit('dag/setProjectName', res.name) localStore.setItem('projectId', res.id) 
localStore.setItem('projectCode', res.code) localStore.setItem('projectName', res.name) next() }).catch(e => { next({ name: 'projects-list' }) }) } else { next() } }, children: [ { path: '/projects/list', name: 'projects-list', component: resolve => require(['../pages/projects/pages/list/index'], resolve), meta: { title: `${i18n.$t('Project')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/projects/:projectId/index', name: 'projects-index', component: resolve => require(['../pages/projects/pages/index/index'], resolve), meta: { title: `${i18n.$t('Project Home')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/projects/:projectId/kinship', name: 'projects-kinship', component: resolve => require(['../pages/projects/pages/kinship/index'], resolve), meta: { title: `${i18n.$t('Kinship')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/projects/:projectId/definition', name: 'definition', component: resolve => require(['../pages/projects/pages/definition/index'], resolve), meta: { title: `${i18n.$t('Process definition')}`, refreshInSwitchedTab: config.refreshInSwitchedTab }, redirect: { name: 'projects-definition-list' }, children: [ { path: '/projects/:projectId/definition/list', name: 'projects-definition-list', component: resolve => require(['../pages/projects/pages/definition/pages/list/index'], resolve), meta: { title: `${i18n.$t('Process definition')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/projects/:projectId/definition/list/:id', name: 'projects-definition-details', component: resolve => require(['../pages/projects/pages/definition/pages/details/index'], resolve), meta: { title: `${i18n.$t('Process definition details')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/projects/:projectId/definition/create', name: 'definition-create', component: resolve => require(['../pages/projects/pages/definition/pages/create/index'], resolve), meta: { title: 
`${i18n.$t('Create process definition')}` } }, { path: '/projects/:projectId/definition/tree/:id', name: 'definition-tree-view-index', component: resolve => require(['../pages/projects/pages/definition/pages/tree/index'], resolve), meta: { title: `${i18n.$t('TreeView')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/projects/:projectId/definition/list/timing/:id', name: 'definition-timing-details', component: resolve => require(['../pages/projects/pages/definition/timing/index'], resolve), meta: { title: `${i18n.$t('Scheduled task list')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } } ] }, { path: '/projects/:projectId/instance', name: 'instance', component: resolve => require(['../pages/projects/pages/instance/index'], resolve), meta: { title: `${i18n.$t('Process Instance')}` }, redirect: { name: 'projects-instance-list' }, children: [ { path: '/projects/:projectId/instance/list', name: 'projects-instance-list', component: resolve => require(['../pages/projects/pages/instance/pages/list/index'], resolve), meta: { title: `${i18n.$t('Process Instance')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/projects/:projectId/instance/list/:id', name: 'projects-instance-details', component: resolve => require(['../pages/projects/pages/instance/pages/details/index'], resolve), meta: { title: `${i18n.$t('Process instance details')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/projects/:projectId/instance/gantt/:id', name: 'instance-gantt-index', component: resolve => require(['../pages/projects/pages/instance/pages/gantt/index'], resolve), meta: { title: `${i18n.$t('Gantt')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } } ] }, { path: '/projects/:projectId/task-instance', name: 'task-instance', component: resolve => require(['../pages/projects/pages/taskInstance'], resolve), meta: { title: `${i18n.$t('Task Instance')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: 
'/projects/:projectId/task-record', name: 'task-record', component: resolve => require(['../pages/projects/pages/taskRecord'], resolve), meta: { title: `${i18n.$t('Task record')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/projects/:projectId/history-task-record', name: 'history-task-record', component: resolve => require(['../pages/projects/pages/historyTaskRecord'], resolve), meta: { title: `${i18n.$t('History task record')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } } ] }, { path: '/resource', name: 'resource', component: resolve => require(['../pages/resource/index'], resolve), redirect: { name: 'file' }, meta: { title: `${i18n.$t('Resources')}`, refreshInSwitchedTab: config.refreshInSwitchedTab }, children: [ { path: '/resource/file', name: 'file', component: resolve => require(['../pages/resource/pages/file/pages/list/index'], resolve), meta: { title: `${i18n.$t('File Manage')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/resource/file/create', name: 'resource-file-create', component: resolve => require(['../pages/resource/pages/file/pages/create/index'], resolve), meta: { title: `${i18n.$t('Create Resource')}` } }, { path: '/resource/file/createFolder', name: 'resource-file-createFolder', component: resolve => require(['../pages/resource/pages/file/pages/createFolder/index'], resolve), meta: { title: `${i18n.$t('Create Resource')}` } }, { path: '/resource/file/subFileFolder/:id', name: 'resource-file-subFileFolder', component: resolve => require(['../pages/resource/pages/file/pages/subFileFolder/index'], resolve), meta: { title: `${i18n.$t('Create Resource')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/resource/file/subFile/:id', name: 'resource-file-subFile', component: resolve => require(['../pages/resource/pages/file/pages/subFile/index'], resolve), meta: { title: `${i18n.$t('Create Resource')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: 
'/resource/file/list/:id', name: 'resource-file-details', component: resolve => require(['../pages/resource/pages/file/pages/details/index'], resolve), meta: { title: `${i18n.$t('File Details')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/resource/file/subdirectory/:id', name: 'resource-file-subdirectory', component: resolve => require(['../pages/resource/pages/file/pages/subdirectory/index'], resolve), meta: { title: `${i18n.$t('File Manage')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/resource/file/edit/:id', name: 'resource-file-edit', component: resolve => require(['../pages/resource/pages/file/pages/edit/index'], resolve), meta: { title: `${i18n.$t('File Details')}` } }, { path: '/resource/udf', name: 'udf', component: resolve => require(['../pages/resource/pages/udf/index'], resolve), meta: { title: `${i18n.$t('UDF manage')}`, refreshInSwitchedTab: config.refreshInSwitchedTab }, children: [ { path: '/resource/udf', name: 'resource-udf', component: resolve => require(['../pages/resource/pages/udf/pages/resource/index'], resolve), meta: { title: `${i18n.$t('UDF Resources')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/resource/udf/subUdfDirectory/:id', name: 'resource-udf-subUdfDirectory', component: resolve => require(['../pages/resource/pages/udf/pages/subUdfDirectory/index'], resolve), meta: { title: `${i18n.$t('UDF Resources')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/resource/udf/createUdfFolder', name: 'resource-udf-createUdfFolder', component: resolve => require(['../pages/resource/pages/udf/pages/createUdfFolder/index'], resolve), meta: { title: `${i18n.$t('Create Resource')}` } }, { path: '/resource/udf/subCreateUdfFolder/:id', name: 'resource-udf-subCreateUdfFolder', component: resolve => require(['../pages/resource/pages/udf/pages/subUdfFolder/index'], resolve), meta: { title: `${i18n.$t('Create Resource')}` } }, { path: '/resource/func', name: 
'resource-func', component: resolve => require(['../pages/resource/pages/udf/pages/function/index'], resolve), meta: { title: `${i18n.$t('UDF Function')}` } } ] } ] }, { path: '/datasource', name: 'datasource', component: resolve => require(['../pages/datasource/index'], resolve), meta: { title: `${i18n.$t('Datasource')}` }, redirect: { name: 'datasource-list' }, children: [ { path: '/datasource/list', name: 'datasource-list', component: resolve => require(['../pages/datasource/pages/list/index'], resolve), meta: { title: `${i18n.$t('Datasource')}` } } ] }, { path: '/security', name: 'security', component: resolve => require(['../pages/security/index'], resolve), meta: { title: `${i18n.$t('Security')}` }, redirect: { name: 'tenement-manage' }, children: [ { path: '/security/tenant', name: 'tenement-manage', component: resolve => require(['../pages/security/pages/tenement/index'], resolve), meta: { title: `${i18n.$t('Tenant Manage')}` } }, { path: '/security/users', name: 'users-manage', component: resolve => require(['../pages/security/pages/users/index'], resolve), meta: { title: `${i18n.$t('User Manage')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/security/warning-groups', name: 'warning-groups-manage', component: resolve => require(['../pages/security/pages/warningGroups/index'], resolve), meta: { title: `${i18n.$t('Warning group manage')}` } }, { path: '/security/warning-instance', name: 'warning-instance-manage', component: resolve => require(['../pages/security/pages/warningInstance/index'], resolve), meta: { title: `${i18n.$t('Warning instance manage')}` } }, { path: '/security/queue', name: 'queue-manage', component: resolve => require(['../pages/security/pages/queue/index'], resolve), meta: { title: `${i18n.$t('Queue manage')}` } }, { path: '/security/worker-groups', name: 'worker-groups-manage', component: resolve => require(['../pages/security/pages/workerGroups/index'], resolve), meta: { title: `${i18n.$t('Worker group manage')}` 
} }, { path: '/security/token', name: 'token-manage', component: resolve => require(['../pages/security/pages/token/index'], resolve), meta: { title: `${i18n.$t('Token manage')}` } } ] }, { path: '/user', name: 'user', component: resolve => require(['../pages/user/index'], resolve), meta: { title: `${i18n.$t('User Center')}` }, redirect: { name: 'account' }, children: [ { path: '/user/account', name: 'account', component: resolve => require(['../pages/user/pages/account/index'], resolve), meta: { title: `${i18n.$t('User Information')}` } }, { path: '/user/password', name: 'password', component: resolve => require(['../pages/user/pages/password/index'], resolve), meta: { title: `${i18n.$t('Edit password')}` } }, { path: '/user/token', name: 'token', component: resolve => require(['../pages/user/pages/token/index'], resolve), meta: { title: `${i18n.$t('Token manage')}` } } ] }, { path: '/monitor', name: 'monitor', component: resolve => require(['../pages/monitor/index'], resolve), meta: { title: 'monitor' }, redirect: { name: 'servers-master' }, children: [ { path: '/monitor/servers/master', name: 'servers-master', component: resolve => require(['../pages/monitor/pages/servers/master'], resolve), meta: { title: `${i18n.$t('Service-Master')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/monitor/servers/worker', name: 'servers-worker', component: resolve => require(['../pages/monitor/pages/servers/worker'], resolve), meta: { title: `${i18n.$t('Service-Worker')}`, refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/monitor/servers/alert', name: 'servers-alert', component: resolve => require(['../pages/monitor/pages/servers/alert'], resolve), meta: { title: 'Alert', refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/monitor/servers/rpcserver', name: 'servers-rpcserver', component: resolve => require(['../pages/monitor/pages/servers/rpcserver'], resolve), meta: { title: 'Rpcserver', refreshInSwitchedTab: 
config.refreshInSwitchedTab } }, { path: '/monitor/servers/zookeeper', name: 'servers-zookeeper', component: resolve => require(['../pages/monitor/pages/servers/zookeeper'], resolve), meta: { title: 'Zookeeper', refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/monitor/servers/apiserver', name: 'servers-apiserver', component: resolve => require(['../pages/monitor/pages/servers/apiserver'], resolve), meta: { title: 'Apiserver', refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/monitor/servers/db', name: 'servers-db', component: resolve => require(['../pages/monitor/pages/servers/db'], resolve), meta: { title: 'DB', refreshInSwitchedTab: config.refreshInSwitchedTab } }, { path: '/monitor/servers/statistics', name: 'statistics', component: resolve => require(['../pages/monitor/pages/servers/statistics'], resolve), meta: { title: 'statistics', refreshInSwitchedTab: config.refreshInSwitchedTab } } ] } ] }) const VueRouterPush = Router.prototype.push Router.prototype.push = function push (to) { return VueRouterPush.call(this, to).catch(err => err) } router.beforeEach((to, from, next) => { const $body = $('body') $body.find('.tooltip.fade.top.in').remove() if (to.meta.title) { document.title = `${to.meta.title} - DolphinScheduler` } next() }) export default router
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,510
[Feature][JsonSplit-api]schedule list-paging interface
from #5498 Change the request parameter processDefinitionId to processDefinitionCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5510
https://github.com/apache/dolphinscheduler/pull/5771
e4f427a8d8bf99754698e054845291a5223c2ea6
72535a47e3dafc68c457996ea6e01b8da17685aa
"2021-05-18T13:57:34Z"
java
"2021-07-09T02:13:00Z"
dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import _ from 'lodash' import io from '@/module/io' import { tasksState } from '@/conf/home/pages/dag/_source/config' // delete 'definitionList' from tasks const deleteDefinitionList = (tasks) => { const newTasks = [] tasks.forEach(item => { const newItem = Object.assign({}, item) if (newItem.dependence && newItem.dependence.dependTaskList) { newItem.dependence.dependTaskList.forEach(dependTaskItem => { if (dependTaskItem.dependItemList) { dependTaskItem.dependItemList.forEach(dependItem => { Reflect.deleteProperty(dependItem, 'definitionList') }) } }) } newTasks.push(newItem) }) return newTasks } export default { /** * Task status acquisition */ getTaskState ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/task-list-by-process-id`, { processInstanceId: payload }, res => { const arr = _.map(res.data.taskList, v => { return _.cloneDeep(_.assign(tasksState[v.state], { name: v.name, stateId: v.id, dependentResult: v.dependentResult })) }) resolve({ list: arr, processInstanceState: res.data.processInstanceState, taskList: res.data.taskList }) }).catch(e => { reject(e) }) }) }, /** * Update process definition status */ editProcessState ({ 
state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/process/release`, { processId: payload.processId, releaseState: payload.releaseState }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * get process definition versions pagination info */ getProcessDefinitionVersionsPage ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/versions`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * switch process definition version */ switchProcessDefinitionVersion ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/version/switch`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * delete process definition version */ deleteProcessDefinitionVersion ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/version/delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Update process instance status */ editExecutorsState ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/executors/execute`, { processInstanceId: payload.processInstanceId, executeType: payload.executeType }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Verify that the DGA map name exists */ verifDAGName ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/verify-name`, { name: payload }, res => { state.name = payload resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get process definition DAG diagram details */ getProcessDetails ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/select-by-id`, { processId: payload }, res => { // process definition code state.code = res.data.code // version state.version = res.data.version // name 
state.name = res.data.name // description state.description = res.data.description // connects state.connects = JSON.parse(res.data.connects) // locations state.locations = JSON.parse(res.data.locations) // Process definition const processDefinitionJson = JSON.parse(res.data.processDefinitionJson) // tasks info state.tasks = processDefinitionJson.tasks // tasks cache state.cacheTasks = {} processDefinitionJson.tasks.forEach(v => { state.cacheTasks[v.id] = v }) // global params state.globalParams = processDefinitionJson.globalParams // timeout state.timeout = processDefinitionJson.timeout state.tenantId = processDefinitionJson.tenantId resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get process definition DAG diagram details */ copyProcess ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/process/copy`, { processDefinitionIds: payload.processDefinitionIds, targetProjectId: payload.targetProjectId }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get process definition DAG diagram details */ moveProcess ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/process/move`, { processDefinitionIds: payload.processDefinitionIds, targetProjectId: payload.targetProjectId }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get all the items created by the logged in user */ getAllItems ({ state }, payload) { return new Promise((resolve, reject) => { io.get('projects/created-and-authorized-project', {}, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get the process instance DAG diagram details */ getInstancedetail ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/select-by-id`, { processInstanceId: payload }, res => { // code state.code = res.data.processDefinitionCode // version state.version = res.data.processDefinitionVersion // name state.name = 
res.data.name // desc state.description = res.data.description // connects state.connects = JSON.parse(res.data.connects) // locations state.locations = JSON.parse(res.data.locations) // process instance const processInstanceJson = JSON.parse(res.data.processInstanceJson) // tasks info state.tasks = processInstanceJson.tasks // tasks cache state.cacheTasks = {} processInstanceJson.tasks.forEach(v => { state.cacheTasks[v.id] = v }) // global params state.globalParams = processInstanceJson.globalParams // timeout state.timeout = processInstanceJson.timeout state.tenantId = processInstanceJson.tenantId // startup parameters state.startup = _.assign(state.startup, _.pick(res.data, ['commandType', 'failureStrategy', 'processInstancePriority', 'workerGroup', 'warningType', 'warningGroupId', 'receivers', 'receiversCc'])) state.startup.commandParam = JSON.parse(res.data.commandParam) resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Create process definition */ saveDAGchart ({ state }, payload) { return new Promise((resolve, reject) => { const data = { globalParams: state.globalParams, tasks: deleteDefinitionList(state.tasks), tenantId: state.tenantId, timeout: state.timeout } io.post(`projects/${state.projectName}/process/save`, { processDefinitionJson: JSON.stringify(data), name: _.trim(state.name), description: _.trim(state.description), locations: JSON.stringify(state.locations), connects: JSON.stringify(state.connects) }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Process definition update */ updateDefinition ({ state }, payload) { return new Promise((resolve, reject) => { const data = { globalParams: state.globalParams, tasks: deleteDefinitionList(state.tasks), tenantId: state.tenantId, timeout: state.timeout } io.post(`projects/${state.projectName}/process/update`, { processDefinitionJson: JSON.stringify(data), locations: JSON.stringify(state.locations), connects: JSON.stringify(state.connects), name: _.trim(state.name), description: 
_.trim(state.description), id: payload, releaseState: state.releaseState }, res => { resolve(res) state.isEditDag = false }).catch(e => { reject(e) }) }) }, /** * Process instance update */ updateInstance ({ state }, payload) { return new Promise((resolve, reject) => { const data = { globalParams: state.globalParams, tasks: state.tasks, tenantId: state.tenantId, timeout: state.timeout } io.post(`projects/${state.projectName}/instance/update`, { processInstanceJson: JSON.stringify(data), locations: JSON.stringify(state.locations), connects: JSON.stringify(state.connects), processInstanceId: payload, syncDefine: state.syncDefine }, res => { resolve(res) state.isEditDag = false }).catch(e => { reject(e) }) }) }, /** * Get a list of process definitions (sub-workflow usage is not paged) */ getProcessList ({ state }, payload) { return new Promise((resolve, reject) => { if (state.processListS.length) { resolve() return } io.get(`projects/${state.projectName}/process/list`, payload, res => { state.processListS = res.data resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get a list of process definitions (list page usage with pagination) */ getProcessListP ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/list-paging`, payload, res => { resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get a list of project */ getProjectList ({ state }, payload) { return new Promise((resolve, reject) => { if (state.projectListS.length) { resolve() return } io.get('projects/query-project-list', payload, res => { state.projectListS = res.data resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get a list of process definitions by project id */ getProcessByProjectId ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/queryProcessDefinitionAllByProjectId`, payload, res => { resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * get 
datasource */ getDatasourceList ({ state }, payload) { return new Promise((resolve, reject) => { io.get('datasources/list', { type: payload }, res => { resolve(res) }).catch(res => { reject(res) }) }) }, /** * get resources */ getResourcesList ({ state }) { return new Promise((resolve, reject) => { if (state.resourcesListS.length) { resolve() return } io.get('resources/list', { type: 'FILE' }, res => { state.resourcesListS = res.data resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * get jar */ getResourcesListJar ({ state }) { return new Promise((resolve, reject) => { if (state.resourcesListJar.length) { resolve() return } io.get('resources/list/jar', { type: 'FILE' }, res => { state.resourcesListJar = res.data resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get process instance */ getProcessInstance ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/list-paging`, payload, res => { state.instanceListS = res.data.totalList resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get alarm list */ getNotifyGroupList ({ state }, payload) { return new Promise((resolve, reject) => { io.get('alert-group/list', res => { state.notifyGroupListS = _.map(res.data, v => { return { id: v.id, code: v.groupName, disabled: false } }) resolve(_.cloneDeep(state.notifyGroupListS)) }).catch(res => { reject(res) }) }) }, /** * Process definition startup interface */ processStart ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/executors/start-process-instance`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * View log */ getLog ({ state }, payload) { return new Promise((resolve, reject) => { io.get('log/detail', payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get the process instance id according to the process definition id * @param taskId */ getSubProcessId ({ state }, payload) { return new 
Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/select-sub-process`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Called before the process definition starts */ getStartCheck ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/executors/start-check`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Create timing */ createSchedule ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectCode}/schedule/create`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Preview timing */ previewSchedule ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/schedule/preview`, payload, res => { resolve(res.data) // alert(res.data) }).catch(e => { reject(e) }) }) }, /** * Timing list paging */ getScheduleList ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/schedule/list-paging`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Timing online */ scheduleOffline ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectCode}/schedule/offline`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Timed offline */ scheduleOnline ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectCode}/schedule/online`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Edit timing */ updateSchedule ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectCode}/schedule/update`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Delete process instance */ deleteInstance ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/delete`, payload, 
res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Batch delete process instance */ batchDeleteInstance ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/batch-delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Delete definition */ deleteDefinition ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Batch delete definition */ batchDeleteDefinition ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/batch-delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * export definition */ exportDefinition ({ state }, payload) { const downloadBlob = (data, fileNameS = 'json') => { if (!data) { return } const blob = new Blob([data]) const fileName = `${fileNameS}.json` if ('download' in document.createElement('a')) { // 不是IE浏览器 const url = window.URL.createObjectURL(blob) const link = document.createElement('a') link.style.display = 'none' link.href = url link.setAttribute('download', fileName) document.body.appendChild(link) link.click() document.body.removeChild(link) // 下载完成移除元素 window.URL.revokeObjectURL(url) // 释放掉blob对象 } else { // IE 10+ window.navigator.msSaveBlob(blob, fileName) } } io.get(`projects/${state.projectName}/process/export`, { processDefinitionIds: payload.processDefinitionIds }, res => { downloadBlob(res, payload.fileName) }, e => { }, { responseType: 'blob' }) }, /** * Process instance get variable */ getViewvariables ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/view-variables`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get udfs function based on data source */ getUdfList ({ state }, payload) { return new Promise((resolve, reject) => { 
io.get('resources/udf-func/list', payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Query task instance list */ getTaskInstanceList ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/task-instance/list-paging`, payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * Force fail/kill/need_fault_tolerance task success */ forceTaskSuccess ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/task-instance/force-success`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Query task record list */ getTaskRecordList ({ state }, payload) { return new Promise((resolve, reject) => { io.get('projects/task-record/list-paging', payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * Query history task record list */ getHistoryTaskRecordList ({ state }, payload) { return new Promise((resolve, reject) => { io.get('projects/task-record/history-list-paging', payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * tree chart */ getViewTree ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/view-tree`, payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * gantt chart */ getViewGantt ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/view-gantt`, payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * Query task node list */ getProcessTasksList ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/gen-task-list`, payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, getTaskListDefIdAll ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/get-task-list`, payload, res => { resolve(res.data) 
}).catch(e => { reject(e) }) }) }, /** * remove timing */ deleteTiming ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/schedule/delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, getResourceId ({ state }, payload) { return new Promise((resolve, reject) => { io.get('resources/queryResource', payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,510
[Feature][JsonSplit-api]schedule list-paging interface
from #5498 Change the request parameter processDefinitionId to processDefinitionCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5510
https://github.com/apache/dolphinscheduler/pull/5771
e4f427a8d8bf99754698e054845291a5223c2ea6
72535a47e3dafc68c457996ea6e01b8da17685aa
"2021-05-18T13:57:34Z"
java
"2021-07-09T02:13:00Z"
pom.xml
<?xml version="1.0" encoding="UTF-8"?> <!-- ~ Licensed to the Apache Software Foundation (ASF) under one or more ~ contributor license agreements. See the NOTICE file distributed with ~ this work for additional information regarding copyright ownership. ~ The ASF licenses this file to You under the Apache License, Version 2.0 ~ (the "License"); you may not use this file except in compliance with ~ the License. You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0 ~ ~ Unless required by applicable law or agreed to in writing, software ~ distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the License for the specific language governing permissions and ~ limitations under the License. --> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler</artifactId> <version>1.3.6-SNAPSHOT</version> <packaging>pom</packaging> <name>${project.artifactId}</name> <url>http://dolphinscheduler.apache.org</url> <description>Dolphin Scheduler is a distributed and easy-to-expand visual DAG workflow scheduling system, dedicated to solving the complex dependencies in data processing, making the scheduling system out of the box for data processing. 
</description> <licenses> <license> <name>Apache License 2.0</name> <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url> <distribution>repo</distribution> </license> </licenses> <scm> <connection>scm:git:https://github.com/apache/dolphinscheduler.git</connection> <developerConnection>scm:git:https://github.com/apache/dolphinscheduler.git</developerConnection> <url>https://github.com/apache/dolphinscheduler</url> <tag>HEAD</tag> </scm> <mailingLists> <mailingList> <name>DolphinScheduler Developer List</name> <post>dev@dolphinscheduler.apache.org</post> <subscribe>dev-subscribe@dolphinscheduler.apache.org</subscribe> <unsubscribe>dev-unsubscribe@dolphinscheduler.apache.org</unsubscribe> </mailingList> </mailingLists> <parent> <groupId>org.apache</groupId> <artifactId>apache</artifactId> <version>21</version> </parent> <properties> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding> <curator.version>4.3.0</curator.version> <zookeeper.version>3.4.14</zookeeper.version> <spring.version>5.1.19.RELEASE</spring.version> <spring.boot.version>2.1.18.RELEASE</spring.boot.version> <java.version>1.8</java.version> <logback.version>1.2.3</logback.version> <hadoop.version>2.7.3</hadoop.version> <quartz.version>2.3.0</quartz.version> <jackson.version>2.10.5</jackson.version> <mybatis-plus.version>3.2.0</mybatis-plus.version> <mybatis.spring.version>2.0.1</mybatis.spring.version> <cron.utils.version>5.0.5</cron.utils.version> <druid.version>1.1.22</druid.version> <h2.version>1.4.200</h2.version> <commons.codec.version>1.11</commons.codec.version> <commons.logging.version>1.1.1</commons.logging.version> <httpclient.version>4.4.1</httpclient.version> <httpcore.version>4.4.1</httpcore.version> <junit.version>4.12</junit.version> <mysql.connector.version>5.1.34</mysql.connector.version> <slf4j.api.version>1.7.5</slf4j.api.version> <slf4j.log4j12.version>1.7.5</slf4j.log4j12.version> 
<commons.collections.version>3.2.2</commons.collections.version> <commons.httpclient>3.0.1</commons.httpclient> <commons.beanutils.version>1.9.4</commons.beanutils.version> <commons.configuration.version>1.10</commons.configuration.version> <commons.email.version>1.5</commons.email.version> <poi.version>4.1.2</poi.version> <javax.servlet.api.version>3.1.0</javax.servlet.api.version> <commons.collections4.version>4.1</commons.collections4.version> <guava.version>24.1-jre</guava.version> <postgresql.version>42.2.5</postgresql.version> <hive.jdbc.version>2.1.0</hive.jdbc.version> <commons.io.version>2.4</commons.io.version> <oshi.core.version>3.9.1</oshi.core.version> <clickhouse.jdbc.version>0.1.52</clickhouse.jdbc.version> <mssql.jdbc.version>6.1.0.jre8</mssql.jdbc.version> <presto.jdbc.version>0.238.1</presto.jdbc.version> <spotbugs.version>3.1.12</spotbugs.version> <checkstyle.version>3.0.0</checkstyle.version> <zookeeper.version>3.4.14</zookeeper.version> <curator.test>2.12.0</curator.test> <frontend-maven-plugin.version>1.6</frontend-maven-plugin.version> <maven-compiler-plugin.version>3.3</maven-compiler-plugin.version> <maven-assembly-plugin.version>3.1.0</maven-assembly-plugin.version> <maven-release-plugin.version>2.5.3</maven-release-plugin.version> <maven-javadoc-plugin.version>2.10.3</maven-javadoc-plugin.version> <maven-source-plugin.version>2.4</maven-source-plugin.version> <maven-surefire-plugin.version>2.22.1</maven-surefire-plugin.version> <maven-dependency-plugin.version>3.1.1</maven-dependency-plugin.version> <rpm-maven-plugion.version>2.2.0</rpm-maven-plugion.version> <jacoco.version>0.8.4</jacoco.version> <jcip.version>1.0</jcip.version> <maven.deploy.skip>false</maven.deploy.skip> <cobertura-maven-plugin.version>2.7</cobertura-maven-plugin.version> <mockito.version>2.21.0</mockito.version> <powermock.version>2.0.2</powermock.version> <servlet-api.version>2.5</servlet-api.version> <swagger.version>1.9.3</swagger.version> 
<springfox.version>2.9.2</springfox.version> <swagger-models.version>1.5.24</swagger-models.version> <guava-retry.version>2.0.0</guava-retry.version> <dep.airlift.version>0.184</dep.airlift.version> <dep.packaging.version>${dep.airlift.version}</dep.packaging.version> <protostuff.version>1.7.2</protostuff.version> <reflections.version>0.9.12</reflections.version> <byte-buddy.version>1.9.16</byte-buddy.version> </properties> <dependencyManagement> <dependencies> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus-boot-starter</artifactId> <version>${mybatis-plus.version}</version> </dependency> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus</artifactId> <version>${mybatis-plus.version}</version> </dependency> <!-- quartz--> <dependency> <groupId>org.quartz-scheduler</groupId> <artifactId>quartz</artifactId> <version>${quartz.version}</version> </dependency> <dependency> <groupId>org.quartz-scheduler</groupId> <artifactId>quartz-jobs</artifactId> <version>${quartz.version}</version> </dependency> <dependency> <groupId>com.cronutils</groupId> <artifactId>cron-utils</artifactId> <version>${cron.utils.version}</version> </dependency> <dependency> <groupId>com.alibaba</groupId> <artifactId>druid</artifactId> <version>${druid.version}</version> </dependency> <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-parent</artifactId> <version>${spring.boot.version}</version> <type>pom</type> <scope>import</scope> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-core</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-context</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-beans</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> 
<artifactId>spring-tx</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-jdbc</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-test</artifactId> <version>${spring.version}</version> <scope>test</scope> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-server</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-common</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-alert-plugin</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-registry-plugin</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-dao</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-api</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-remote</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-service</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-alert</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-spi</artifactId> <version>${project.version}</version> 
</dependency> <dependency> <groupId>org.apache.curator</groupId> <artifactId>curator-framework</artifactId> <version>${curator.version}</version> <exclusions> <exclusion> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.zookeeper</groupId> <artifactId>zookeeper</artifactId> <version>${zookeeper.version}</version> <exclusions> <exclusion> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> </exclusion> <exclusion> <artifactId>netty</artifactId> <groupId>io.netty</groupId> </exclusion> <exclusion> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs-annotations</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.curator</groupId> <artifactId>curator-client</artifactId> <version>${curator.version}</version> <exclusions> <exclusion> <groupId>log4j-1.2-api</groupId> <artifactId>org.apache.logging.log4j</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.curator</groupId> <artifactId>curator-recipes</artifactId> <version>${curator.version}</version> <exclusions> <exclusion> <groupId>org.apache.zookeeper</groupId> <artifactId>zookeeper</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.curator</groupId> <artifactId>curator-test</artifactId> <version>${curator.test}</version> <scope>test</scope> </dependency> <dependency> <groupId>commons-codec</groupId> <artifactId>commons-codec</artifactId> <version>${commons.codec.version}</version> </dependency> <dependency> <groupId>commons-logging</groupId> <artifactId>commons-logging</artifactId> <version>${commons.logging.version}</version> </dependency> <dependency> <groupId>org.apache.httpcomponents</groupId> <artifactId>httpclient</artifactId> <version>${httpclient.version}</version> </dependency> <dependency> <groupId>org.apache.httpcomponents</groupId> <artifactId>httpcore</artifactId> 
<version>${httpcore.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-annotations</artifactId> <version>${jackson.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-databind</artifactId> <version>${jackson.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-core</artifactId> <version>${jackson.version}</version> </dependency> <!--protostuff--> <!-- https://mvnrepository.com/artifact/io.protostuff/protostuff-core --> <dependency> <groupId>io.protostuff</groupId> <artifactId>protostuff-core</artifactId> <version>${protostuff.version}</version> </dependency> <!-- https://mvnrepository.com/artifact/io.protostuff/protostuff-runtime --> <dependency> <groupId>io.protostuff</groupId> <artifactId>protostuff-runtime</artifactId> <version>${protostuff.version}</version> </dependency> <dependency> <groupId>net.bytebuddy</groupId> <artifactId>byte-buddy</artifactId> <version>${byte-buddy.version}</version> </dependency> <dependency> <groupId>org.reflections</groupId> <artifactId>reflections</artifactId> <version>${reflections.version}</version> </dependency> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <version>${junit.version}</version> </dependency> <dependency> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> <version>${mockito.version}</version> <type>jar</type> <scope>test</scope> </dependency> <dependency> <groupId>org.powermock</groupId> <artifactId>powermock-module-junit4</artifactId> <version>${powermock.version}</version> <type>jar</type> <scope>test</scope> </dependency> <dependency> <groupId>org.powermock</groupId> <artifactId>powermock-api-mockito2</artifactId> <version>${powermock.version}</version> <type>jar</type> <scope>test</scope> <exclusions> <exclusion> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> </exclusion> 
</exclusions> </dependency> <dependency> <groupId>mysql</groupId> <artifactId>mysql-connector-java</artifactId> <version>${mysql.connector.version}</version> <scope>test</scope> </dependency> <dependency> <groupId>com.h2database</groupId> <artifactId>h2</artifactId> <version>${h2.version}</version> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-api</artifactId> <version>${slf4j.api.version}</version> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> <version>${slf4j.log4j12.version}</version> </dependency> <dependency> <groupId>commons-collections</groupId> <artifactId>commons-collections</artifactId> <version>${commons.collections.version}</version> </dependency> <dependency> <groupId>commons-httpclient</groupId> <artifactId>commons-httpclient</artifactId> <version>${commons.httpclient}</version> </dependency> <dependency> <groupId>commons-beanutils</groupId> <artifactId>commons-beanutils</artifactId> <version>${commons.beanutils.version}</version> </dependency> <dependency> <groupId>commons-configuration</groupId> <artifactId>commons-configuration</artifactId> <version>${commons.configuration.version}</version> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-classic</artifactId> <version>${logback.version}</version> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-core</artifactId> <version>${logback.version}</version> </dependency> <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-email</artifactId> <version>${commons.email.version}</version> </dependency> <!--excel poi--> <dependency> <groupId>org.apache.poi</groupId> <artifactId>poi</artifactId> <version>${poi.version}</version> </dependency> <dependency> <groupId>org.apache.poi</groupId> <artifactId>poi-ooxml</artifactId> <version>${poi.version}</version> </dependency> <!-- hadoop --> <dependency> <groupId>org.apache.hadoop</groupId> 
<artifactId>hadoop-common</artifactId> <version>${hadoop.version}</version> <exclusions> <exclusion> <artifactId>slf4j-log4j12</artifactId> <groupId>org.slf4j</groupId> </exclusion> <exclusion> <artifactId>com.sun.jersey</artifactId> <groupId>jersey-json</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-client</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-hdfs</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-yarn-common</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-aws</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-collections4</artifactId> <version>${commons.collections4.version}</version> </dependency> <dependency> <groupId>com.google.guava</groupId> <artifactId>guava</artifactId> <version>${guava.version}</version> </dependency> <dependency> <groupId>org.postgresql</groupId> <artifactId>postgresql</artifactId> <version>${postgresql.version}</version> </dependency> <dependency> <groupId>org.apache.hive</groupId> <artifactId>hive-jdbc</artifactId> <version>${hive.jdbc.version}</version> </dependency> <dependency> <groupId>commons-io</groupId> <artifactId>commons-io</artifactId> <version>${commons.io.version}</version> </dependency> <dependency> <groupId>com.github.oshi</groupId> <artifactId>oshi-core</artifactId> <version>${oshi.core.version}</version> </dependency> <dependency> <groupId>ru.yandex.clickhouse</groupId> <artifactId>clickhouse-jdbc</artifactId> <version>${clickhouse.jdbc.version}</version> </dependency> <dependency> <groupId>com.microsoft.sqlserver</groupId> <artifactId>mssql-jdbc</artifactId> <version>${mssql.jdbc.version}</version> 
</dependency> <dependency> <groupId>com.facebook.presto</groupId> <artifactId>presto-jdbc</artifactId> <version>${presto.jdbc.version}</version> </dependency> <dependency> <groupId>net.jcip</groupId> <artifactId>jcip-annotations</artifactId> <version>${jcip.version}</version> <optional>true</optional> </dependency> <dependency> <groupId>javax.servlet</groupId> <artifactId>servlet-api</artifactId> <version>${servlet-api.version}</version> </dependency> <dependency> <groupId>javax.servlet</groupId> <artifactId>javax.servlet-api</artifactId> <version>${javax.servlet.api.version}</version> </dependency> <dependency> <groupId>io.springfox</groupId> <artifactId>springfox-swagger2</artifactId> <version>${springfox.version}</version> </dependency> <dependency> <groupId>io.springfox</groupId> <artifactId>springfox-swagger-ui</artifactId> <version>${springfox.version}</version> </dependency> <dependency> <groupId>io.swagger</groupId> <artifactId>swagger-models</artifactId> <version>${swagger-models.version}</version> </dependency> <dependency> <groupId>com.github.xiaoymin</groupId> <artifactId>swagger-bootstrap-ui</artifactId> <version>${swagger.version}</version> </dependency> <dependency> <groupId>com.github.rholder</groupId> <artifactId>guava-retrying</artifactId> <version>${guava-retry.version}</version> </dependency> <dependency> <groupId>org.sonatype.aether</groupId> <artifactId>aether-api</artifactId> <version>1.13.1</version> </dependency> <dependency> <groupId>io.airlift.resolver</groupId> <artifactId>resolver</artifactId> <version>1.5</version> </dependency> <dependency> <groupId>org.ow2.asm</groupId> <artifactId>asm</artifactId> <version>6.2.1</version> </dependency> <dependency> <groupId>javax.activation</groupId> <artifactId>activation</artifactId> <version>1.1</version> </dependency> <dependency> <groupId>com.sun.mail</groupId> <artifactId>javax.mail</artifactId> <version>1.6.2</version> </dependency> </dependencies> </dependencyManagement> <build> 
<finalName>apache-dolphinscheduler-${project.version}</finalName> <pluginManagement> <plugins> <plugin> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-maven-plugin</artifactId> <version>1.0.0</version> <extensions>true</extensions> </plugin> <plugin> <groupId>ca.vanzyl.maven.plugins</groupId> <artifactId>provisio-maven-plugin</artifactId> <version>1.0.4</version> <extensions>true</extensions> </plugin> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>rpm-maven-plugin</artifactId> <version>${rpm-maven-plugion.version}</version> <inherited>false</inherited> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <configuration> <source>${java.version}</source> <target>${java.version}</target> <testSource>${java.version}</testSource> <testTarget>${java.version}</testTarget> </configuration> <version>${maven-compiler-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-release-plugin</artifactId> <version>${maven-release-plugin.version}</version> <configuration> <tagNameFormat>@{project.version}</tagNameFormat> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-assembly-plugin</artifactId> <version>${maven-assembly-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> <version>${maven-javadoc-plugin.version}</version> <configuration> <source>8</source> <failOnError>false</failOnError> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-source-plugin</artifactId> <version>${maven-source-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-dependency-plugin</artifactId> 
<version>${maven-dependency-plugin.version}</version> </plugin> </plugins> </pluginManagement> <plugins> <plugin> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-maven-plugin</artifactId> <extensions>true</extensions> <!--<configuration>--> <!--<allowedProvidedDependencies>--> <!--<allowedProvidedDependency>org.apache.dolphinscheduler:dolphinscheduler-common</allowedProvidedDependency>--> <!--</allowedProvidedDependencies>--> <!--</configuration>--> </plugin> <plugin> <groupId>ca.vanzyl.maven.plugins</groupId> <artifactId>provisio-maven-plugin</artifactId> <extensions>true</extensions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-source-plugin</artifactId> <executions> <execution> <id>attach-sources</id> <phase>verify</phase> <goals> <goal>jar-no-fork</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> <version>${maven-javadoc-plugin.version}</version> <executions> <execution> <id>attach-javadocs</id> <goals> <goal>jar</goal> </goals> </execution> </executions> <configuration> <aggregate>true</aggregate> <charset>${project.build.sourceEncoding}</charset> <encoding>${project.build.sourceEncoding}</encoding> <docencoding>${project.build.sourceEncoding}</docencoding> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-release-plugin</artifactId> <version>${maven-release-plugin.version}</version> <configuration> <autoVersionSubmodules>true</autoVersionSubmodules> <tagNameFormat>@{project.version}</tagNameFormat> <tagBase>${project.version}</tagBase> <!--<goals>-f pom.xml deploy</goals>--> </configuration> <dependencies> <dependency> <groupId>org.apache.maven.scm</groupId> <artifactId>maven-scm-provider-jgit</artifactId> <version>1.9.5</version> </dependency> </dependencies> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> 
<artifactId>maven-compiler-plugin</artifactId> <version>${maven-compiler-plugin.version}</version> <configuration> <source>${java.version}</source> <target>${java.version}</target> <encoding>${project.build.sourceEncoding}</encoding> <skip>false</skip><!--not skip compile test classes--> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> <version>${maven-surefire-plugin.version}</version> <configuration> <includes> <!--registry plugin --> <include>**/plugin/registry/zookeeper/ZookeeperRegistryTest.java</include> <!-- API --> <include>**/api/controller/ProjectControllerTest.java</include> <include>**/api/controller/QueueControllerTest.java</include> <include>**/api/configuration/TrafficConfigurationTest.java</include> <include>**/api/controller/ProcessDefinitionControllerTest.java</include> <include>**/api/controller/TenantControllerTest.java</include> <include>**/api/dto/resources/filter/ResourceFilterTest.java</include> <include>**/api/dto/resources/visitor/ResourceTreeVisitorTest.java</include> <includeDataxTaskTest>**/api/enums/testGetEnum.java</includeDataxTaskTest> <include>**/api/enums/StatusTest.java</include> <include>**/api/exceptions/ApiExceptionHandlerTest.java</include> <include>**/api/exceptions/ServiceExceptionTest.java</include> <include>**/api/interceptor/LocaleChangeInterceptorTest.java</include> <include>**/api/interceptor/LoginHandlerInterceptorTest.java</include> <include>**/api/interceptor/RateLimitInterceptorTest.java</include> <include>**/api/security/impl/pwd/PasswordAuthenticatorTest.java</include> <include>**/api/security/impl/ldap/LdapAuthenticatorTest.java</include> <include>**/api/security/SecurityConfigLDAPTest.java</include> <include>**/api/security/SecurityConfigPasswordTest.java</include> <include>**/api/service/AccessTokenServiceTest.java</include> <include>**/api/service/AlertGroupServiceTest.java</include> 
<include>**/api/service/BaseDAGServiceTest.java</include> <include>**/api/service/BaseServiceTest.java</include> <include>**/api/service/DataAnalysisServiceTest.java</include> <include>**/api/service/AlertPluginInstanceServiceTest.java</include> <include>**/api/service/DataSourceServiceTest.java</include> <include>**/api/service/ExecutorService2Test.java</include> <include>**/api/service/ExecutorServiceTest.java</include> <include>**/api/service/LoggerServiceTest.java</include> <include>**/api/service/MonitorServiceTest.java</include> <include>**/api/service/ProcessDefinitionServiceTest.java</include> <include>**/api/service/ProcessTaskRelationServiceImplTest.java</include> <include>**/api/service/TaskDefinitionServiceImplTest.java</include> <include>**/api/service/ProcessInstanceServiceTest.java</include> <include>**/api/service/ProjectServiceTest.java</include> <include>**/api/service/QueueServiceTest.java</include> <include>**/api/service/ResourcesServiceTest.java</include> <include>**/api/service/SchedulerServiceTest.java</include> <include>**/api/service/SessionServiceTest.java</include> <include>**/api/service/TaskInstanceServiceTest.java</include> <include>**/api/service/TenantServiceTest.java</include> <include>**/api/service/UdfFuncServiceTest.java</include> <include>**/api/service/UiPluginServiceTest.java</include> <include>**/api/service/UserAlertGroupServiceTest.java</include> <include>**/api/service/UsersServiceTest.java</include> <include>**/api/service/WorkerGroupServiceTest.java</include> <include>**/api/service/WorkFlowLineageServiceTest.java</include> <include>**/api/controller/ProcessDefinitionControllerTest.java</include> <include>**/api/controller/TaskInstanceControllerTest.java</include> <include>**/api/controller/WorkFlowLineageControllerTest.java</include> <include>**/api/utils/exportprocess/DataSourceParamTest.java</include> <include>**/api/utils/exportprocess/DependentParamTest.java</include> 
<include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/FileUtilsTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/ResultTest.java</include> <include>**/common/graph/DAGTest.java</include> <include>**/common/os/OshiTest.java</include> <include>**/common/os/OSUtilsTest.java</include> <include>**/common/shell/ShellExecutorTest.java</include> <include>**/common/task/DataxParametersTest.java</include> <include>**/common/task/EntityTestUtils.java</include> <include>**/common/task/FlinkParametersTest.java</include> <include>**/common/task/HttpParametersTest.java</include> <include>**/common/task/SparkParametersTest.java</include> <include>**/common/task/SqlParametersTest.java</include> <include>**/common/task/SqoopParameterEntityTest.java</include> <include>**/common/threadutils/ThreadPoolExecutorsTest.java</include> <include>**/common/threadutils/ThreadUtilsTest.java</include> <include>**/common/utils/CollectionUtilsTest.java</include> <include>**/common/utils/CommonUtilsTest.java</include> <include>**/common/utils/DateUtilsTest.java</include> <include>**/common/utils/DependentUtilsTest.java</include> <include>**/common/utils/EncryptionUtilsTest.java</include> <include>**/common/utils/FileUtilsTest.java</include> <include>**/common/utils/JSONUtilsTest.java</include> <include>**/common/utils/LoggerUtilsTest.java</include> <include>**/common/utils/NetUtilsTest.java</include> <include>**/common/utils/OSUtilsTest.java</include> <include>**/common/utils/ParameterUtilsTest.java</include> <include>**/common/utils/TimePlaceholderUtilsTest.java</include> <include>**/common/utils/PreconditionsTest.java</include> <include>**/common/utils/PropertyUtilsTest.java</include> <include>**/common/utils/SchemaUtilsTest.java</include> <include>**/common/utils/ScriptRunnerTest.java</include> <include>**/common/utils/SensitiveLogUtilsTest.java</include> 
<include>**/common/utils/StringTest.java</include> <include>**/common/utils/StringUtilsTest.java</include> <include>**/common/utils/TaskParametersUtilsTest.java</include> <include>**/common/utils/VarPoolUtilsTest.java</include> <include>**/common/utils/HadoopUtilsTest.java</include> <include>**/common/utils/HttpUtilsTest.java</include> <include>**/common/utils/KerberosHttpClientTest.java</include> <include>**/common/utils/HiveConfUtilsTest.java</include> <include>**/common/ConstantsTest.java</include> <include>**/common/utils/HadoopUtils.java</include> <include>**/common/utils/RetryerUtilsTest.java</include> <include>**/common/datasource/clickhouse/ClickHouseDatasourceProcessorTest.java</include> <include>**/common/datasource/db2/Db2DatasourceProcessorTest.java</include> <include>**/common/datasource/hive/HiveDatasourceProcessorTest.java</include> <include>**/common/datasource/mysql/MysqlDatasourceProcessorTest.java</include> <include>**/common/datasource/oracle/OracleDatasourceProcessorTest.java</include> <include>**/common/datasource/postgresql/PostgreSqlDatasourceProcessorTest.java</include> <include>**/common/datasource/presto/PrestoDatasourceProcessorTest.java</include> <include>**/common/datasource/spark/SparkDatasourceProcessorTest.java</include> <include>**/common/datasource/sqlserver/SqlServerDatasourceProcessorTest.java</include> <include>**/common/datasource/DatasourceUtilTest.java</include> <include>**/common/enums/ExecutionStatusTest</include> <include>**/dao/mapper/AccessTokenMapperTest.java</include> <include>**/dao/mapper/AlertGroupMapperTest.java</include> <include>**/dao/mapper/CommandMapperTest.java</include> <include>**/dao/mapper/ConnectionFactoryTest.java</include> <include>**/dao/mapper/DataSourceMapperTest.java</include> <include>**/dao/datasource/MySQLDataSourceTest.java</include> <include>**/dao/entity/TaskInstanceTest.java</include> <include>**/dao/entity/UdfFuncTest.java</include> 
<include>**/remote/command/alert/AlertSendRequestCommandTest.java</include> <include>**/remote/command/alert/AlertSendResponseCommandTest.java</include> <include>**/remote/command/future/ResponseFutureTest.java</include> <include>**/remote/command/log/RemoveTaskLogRequestCommandTest.java</include> <include>**/remote/command/log/RemoveTaskLogResponseCommandTest.java</include> <include>**/remote/command/log/GetLogBytesRequestCommandTest.java</include> <include>**/remote/command/log/GetLogBytesResponseCommandTest.java</include> <include>**/remote/command/log/ViewLogRequestCommandTest.java</include> <include>**/remote/utils/HostTest.java</include> <include>**/remote/utils/NettyUtilTest.java</include> <include>**/remote/NettyRemotingClientTest.java</include> <include>**/rpc/RpcTest.java</include> <include>**/server/log/LoggerServerTest.java</include> <include>**/server/entity/SQLTaskExecutionContextTest.java</include> <include>**/server/log/MasterLogFilterTest.java</include> <include>**/server/log/SensitiveDataConverterTest.java</include> <include>**/server/log/LoggerRequestProcessorTest.java</include> <!--<include>**/server/log/TaskLogDiscriminatorTest.java</include>--> <include>**/server/log/TaskLogFilterTest.java</include> <include>**/server/log/WorkerLogFilterTest.java</include> <include>**/server/master/cache/impl/TaskInstanceCacheManagerImplTest.java</include> <include>**/server/master/config/MasterConfigTest.java</include> <include>**/server/master/consumer/TaskPriorityQueueConsumerTest.java</include> <include>**/server/master/runner/MasterTaskExecThreadTest.java</include> <!--<include>**/server/master/dispatch/executor/NettyExecutorManagerTest.java</include>--> <include>**/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java</include> <include>**/server/master/dispatch/host/assign/RandomSelectorTest.java</include> <include>**/server/master/dispatch/host/assign/RoundRobinSelectorTest.java</include> 
<include>**/server/master/dispatch/host/assign/HostWorkerTest.java</include> <include>**/server/master/registry/MasterRegistryClientTest.java</include> <include>**/server/master/registry/ServerNodeManagerTest.java</include> <include>**/server/master/dispatch/host/assign/RoundRobinHostManagerTest.java</include> <include>**/server/master/MasterCommandTest.java</include> <include>**/server/master/DependentTaskTest.java</include> <include>**/server/master/ConditionsTaskTest.java</include> <include>**/server/master/MasterExecThreadTest.java</include> <include>**/server/master/ParamsTest.java</include> <include>**/server/master/SubProcessTaskTest.java</include> <include>**/server/master/processor/TaskAckProcessorTest.java</include> <include>**/server/master/processor/TaskKillResponseProcessorTest.java</include> <include>**/server/master/processor/queue/TaskResponseServiceTest.java</include> <include>**/server/master/zk/ZKMasterClientTest.java</include> <include>**/server/registry/ZookeeperRegistryCenterTest.java</include> <include>**/server/utils/DataxUtilsTest.java</include> <include>**/server/utils/ExecutionContextTestUtils.java</include> <include>**/server/utils/FlinkArgsUtilsTest.java</include> <include>**/server/utils/LogUtilsTest.java</include> <include>**/server/utils/MapReduceArgsUtilsTest.java</include> <include>**/server/utils/ParamUtilsTest.java</include> <include>**/server/utils/ProcessUtilsTest.java</include> <include>**/server/utils/SparkArgsUtilsTest.java</include> <include>**/server/worker/processor/TaskCallbackServiceTest.java</include> <include>**/server/worker/processor/TaskExecuteProcessorTest.java</include> <include>**/server/worker/registry/WorkerRegistryTest.java</include> <include>**/server/worker/shell/ShellCommandExecutorTest.java</include> <include>**/server/worker/sql/SqlExecutorTest.java</include> <include>**/server/worker/task/spark/SparkTaskTest.java</include> <include>**/server/worker/task/spark/SparkTaskTest.java</include> 
<include>**/server/worker/task/datax/DataxTaskTest.java</include> <!--<include>**/server/worker/task/http/HttpTaskTest.java</include>--> <include>**/server/worker/task/sqoop/SqoopTaskTest.java</include> <include>**/server/worker/task/processdure/ProcedureTaskTest.java</include> <include>**/server/worker/task/shell/ShellTaskTest.java</include> <include>**/server/worker/task/TaskManagerTest.java</include> <include>**/server/worker/task/PythonCommandExecutorTest.java</include> <include>**/server/worker/task/TaskParamsTest.java</include> <include>**/server/worker/task/ShellTaskReturnTest.java</include> <include>**/server/worker/task/sql/SqlTaskTest.java</include> <include>**/server/worker/runner/TaskExecuteThreadTest.java</include> <include>**/server/worker/runner/WorkerManagerThreadTest.java</include> <include>**/service/quartz/cron/CronUtilsTest.java</include> <include>**/service/process/ProcessServiceTest.java</include> <include>**/service/registry/RegistryClientTest.java</include> <include>**/service/registry/RegistryPluginTest.java</include> <include>**/service/queue/TaskUpdateQueueTest.java</include> <include>**/service/queue/PeerTaskInstancePriorityQueueTest.java</include> <include>**/service/log/LogClientServiceTest.java</include> <include>**/service/alert/AlertClientServiceTest.java</include> <include>**/service/alert/ProcessAlertManagerTest.java</include> <include>**/dao/mapper/DataSourceUserMapperTest.java</include> <!--<iTaskUpdateQueueConsumerThreadnclude>**/dao/mapper/ErrorCommandMapperTest.java</iTaskUpdateQueueConsumerThreadnclude>--> <include>**/dao/mapper/ProcessDefinitionMapperTest.java</include> <include>**/dao/mapper/ProcessInstanceMapMapperTest.java</include> <include>**/dao/mapper/ProcessInstanceMapperTest.java</include> <include>**/dao/mapper/ProjectMapperTest.java</include> <include>**/dao/mapper/ProjectUserMapperTest.java</include> <include>**/dao/mapper/QueueMapperTest.java</include> 
<include>**/dao/mapper/ResourceUserMapperTest.java</include> <include>**/dao/mapper/ScheduleMapperTest.java</include> <include>**/dao/mapper/SessionMapperTest.java</include> <include>**/dao/mapper/TaskInstanceMapperTest.java</include> <include>**/dao/mapper/TenantMapperTest.java</include> <include>**/dao/mapper/UdfFuncMapperTest.java</include> <include>**/dao/mapper/UDFUserMapperTest.java</include> <include>**/dao/mapper/UserMapperTest.java</include> <include>**/dao/mapper/AlertPluginInstanceMapperTest.java</include> <include>**/dao/mapper/PluginDefineTest.java</include> <include>**/dao/utils/DagHelperTest.java</include> <include>**/dao/AlertDaoTest.java</include> <include>**/dao/datasource/OracleDataSourceTest.java</include> <include>**/dao/datasource/HiveDataSourceTest.java</include> <include>**/dao/datasource/BaseDataSourceTest.java</include> <include>**/dao/upgrade/ProcessDefinitionDaoTest.java</include> <include>**/dao/upgrade/WokrerGrouopDaoTest.java</include> <include>**/dao/upgrade/UpgradeDaoTest.java</include> <include>**/plugin/alert/email/EmailAlertChannelFactoryTest.java</include> <include>**/plugin/alert/email/EmailAlertChannelTest.java</include> <include>**/plugin/alert/email/ExcelUtilsTest.java</include> <include>**/plugin/alert/email/MailUtilsTest.java</include> <include>**/plugin/alert/email/template/DefaultHTMLTemplateTest.java</include> <include>**/plugin/alert/dingtalk/DingTalkSenderTest.java</include> <include>**/plugin/alert/dingtalk/DingTalkAlertChannelFactoryTest.java</include> <include>**/plugin/alert/wechat/WeChatSenderTest.java</include> <include>**/plugin/alert/wechat/WeChatAlertChannelFactoryTest.java</include> <include>**/plugin/alert/script/ProcessUtilsTest.java</include> <include>**/plugin/alert/script/ScriptAlertChannelFactoryTest.java</include> <include>**/plugin/alert/script/ScriptSenderTest.java</include> <include>**/plugin/alert/http/HttpAlertChannelFactoryTest.java</include> 
<include>**/plugin/alert/http/HttpAlertChannelTest.java</include> <include>**/plugin/alert/feishu/FeiShuAlertChannelFactoryTest.java</include> <include>**/plugin/alert/feishu/FeiShuSenderTest.java</include> <include>**/plugin/alert/http/HttpAlertPluginTest.java</include> <include>**/plugin/alert/http/HttpSenderTest.java</include> <include>**/plugin/alert/slack/SlackAlertChannelFactoryTest.java</include> <include>**/plugin/alert/slack/SlackAlertPluginTest.java</include> <include>**/plugin/alert/slack/SlackSenderTest.java</include> <include>**/spi/params/PluginParamsTransferTest.java</include> <include>**/spi/plugin/DolphinSchedulerPluginLoaderTest.java</include> <include>**/alert/plugin/EmailAlertPluginTest.java</include> <include>**/alert/plugin/AlertPluginManagerTest.java</include> <include>**/alert/plugin/DolphinPluginLoaderTest.java</include> <include>**/alert/utils/FuncUtilsTest.java</include> <include>**/alert/processor/AlertRequestProcessorTest.java</include> <include>**/alert/runner/AlertSenderTest.java</include> <include>**/alert/AlertServerTest.java</include> </includes> <!-- <skip>true</skip> --> </configuration> </plugin> <!-- jenkins plugin jacoco report--> <plugin> <groupId>org.jacoco</groupId> <artifactId>jacoco-maven-plugin</artifactId> <version>${jacoco.version}</version> <configuration> <destFile>target/jacoco.exec</destFile> <dataFile>target/jacoco.exec</dataFile> </configuration> <executions> <execution> <id>jacoco-initialize</id> <goals> <goal>prepare-agent</goal> </goals> </execution> <execution> <id>jacoco-site</id> <phase>test</phase> <goals> <goal>report</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs-maven-plugin</artifactId> <version>${spotbugs.version}</version> <configuration> <xmlOutput>true</xmlOutput> <threshold>medium</threshold> <effort>default</effort> <excludeFilterFile>dev-config/spotbugs-exclude.xml</excludeFilterFile> <failOnError>true</failOnError> 
</configuration> <dependencies> <dependency> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs</artifactId> <version>4.0.0-beta4</version> </dependency> </dependencies> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-checkstyle-plugin</artifactId> <version>${checkstyle.version}</version> <dependencies> <dependency> <groupId>com.puppycrawl.tools</groupId> <artifactId>checkstyle</artifactId> <version>8.18</version> </dependency> </dependencies> <configuration> <consoleOutput>true</consoleOutput> <encoding>UTF-8</encoding> <configLocation>style/checkstyle.xml</configLocation> <suppressionsLocation>style/checkstyle-suppressions.xml</suppressionsLocation> <suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression> <failOnViolation>true</failOnViolation> <violationSeverity>warning</violationSeverity> <includeTestSourceDirectory>true</includeTestSourceDirectory> <sourceDirectories> <sourceDirectory>${project.build.sourceDirectory}</sourceDirectory> </sourceDirectories> <excludes>**\/generated-sources\/</excludes> <skip>true</skip> </configuration> <executions> <execution> <phase>compile</phase> <goals> <goal>check</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>cobertura-maven-plugin</artifactId> <version>${cobertura-maven-plugin.version}</version> <configuration> <check> </check> <aggregate>true</aggregate> <outputDirectory>./target/cobertura</outputDirectory> <encoding>${project.build.sourceEncoding}</encoding> <quiet>true</quiet> <format>xml</format> <instrumentation> <ignoreTrivial>true</ignoreTrivial> </instrumentation> </configuration> </plugin> </plugins> </build> <modules> <module>dolphinscheduler-spi</module> <module>dolphinscheduler-alert-plugin</module> <module>dolphinscheduler-registry-plugin</module> <module>dolphinscheduler-ui</module> <module>dolphinscheduler-server</module> <module>dolphinscheduler-common</module> 
<module>dolphinscheduler-api</module> <module>dolphinscheduler-dao</module> <module>dolphinscheduler-alert</module> <module>dolphinscheduler-dist</module> <module>dolphinscheduler-remote</module> <module>dolphinscheduler-service</module> <module>dolphinscheduler-microbench</module> </modules> </project>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,773
[Improvement][server] need to support two parameters related to task
**Describe the question** When I'm using the shell task ,I need the instance id of task and the absolute path of task. **What are the current deficiencies and the benefits of improvement** **Which version of DolphinScheduler:** -[dev] **Describe alternatives you've considered**
https://github.com/apache/dolphinscheduler/issues/5773
https://github.com/apache/dolphinscheduler/pull/5774
ab527a5e5abd04243305a50f184d8009b9edf21a
9fd5145b66646f3df847ea3c81bb272621ee86ca
"2021-07-08T10:01:12Z"
java
"2021-07-09T09:00:32Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.common; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import java.util.regex.Pattern; /** * Constants */ public final class Constants { private Constants() { throw new UnsupportedOperationException("Construct Constants"); } /** * quartz config */ public static final String ORG_QUARTZ_JOBSTORE_DRIVERDELEGATECLASS = "org.quartz.jobStore.driverDelegateClass"; public static final String ORG_QUARTZ_SCHEDULER_INSTANCENAME = "org.quartz.scheduler.instanceName"; public static final String ORG_QUARTZ_SCHEDULER_INSTANCEID = "org.quartz.scheduler.instanceId"; public static final String ORG_QUARTZ_SCHEDULER_MAKESCHEDULERTHREADDAEMON = "org.quartz.scheduler.makeSchedulerThreadDaemon"; public static final String ORG_QUARTZ_JOBSTORE_USEPROPERTIES = "org.quartz.jobStore.useProperties"; public static final String ORG_QUARTZ_THREADPOOL_CLASS = "org.quartz.threadPool.class"; public static final String ORG_QUARTZ_THREADPOOL_THREADCOUNT = "org.quartz.threadPool.threadCount"; public static final String ORG_QUARTZ_THREADPOOL_MAKETHREADSDAEMONS = 
"org.quartz.threadPool.makeThreadsDaemons"; public static final String ORG_QUARTZ_THREADPOOL_THREADPRIORITY = "org.quartz.threadPool.threadPriority"; public static final String ORG_QUARTZ_JOBSTORE_CLASS = "org.quartz.jobStore.class"; public static final String ORG_QUARTZ_JOBSTORE_TABLEPREFIX = "org.quartz.jobStore.tablePrefix"; public static final String ORG_QUARTZ_JOBSTORE_ISCLUSTERED = "org.quartz.jobStore.isClustered"; public static final String ORG_QUARTZ_JOBSTORE_MISFIRETHRESHOLD = "org.quartz.jobStore.misfireThreshold"; public static final String ORG_QUARTZ_JOBSTORE_CLUSTERCHECKININTERVAL = "org.quartz.jobStore.clusterCheckinInterval"; public static final String ORG_QUARTZ_JOBSTORE_ACQUIRETRIGGERSWITHINLOCK = "org.quartz.jobStore.acquireTriggersWithinLock"; public static final String ORG_QUARTZ_JOBSTORE_DATASOURCE = "org.quartz.jobStore.dataSource"; public static final String ORG_QUARTZ_DATASOURCE_MYDS_CONNECTIONPROVIDER_CLASS = "org.quartz.dataSource.myDs.connectionProvider.class"; /** * quartz config default value */ public static final String QUARTZ_TABLE_PREFIX = "QRTZ_"; public static final String QUARTZ_MISFIRETHRESHOLD = "60000"; public static final String QUARTZ_CLUSTERCHECKININTERVAL = "5000"; public static final String QUARTZ_DATASOURCE = "myDs"; public static final String QUARTZ_THREADCOUNT = "25"; public static final String QUARTZ_THREADPRIORITY = "5"; public static final String QUARTZ_INSTANCENAME = "DolphinScheduler"; public static final String QUARTZ_INSTANCEID = "AUTO"; public static final String QUARTZ_ACQUIRETRIGGERSWITHINLOCK = "true"; /** * common properties path */ public static final String COMMON_PROPERTIES_PATH = "/common.properties"; /** * fs.defaultFS */ public static final String FS_DEFAULTFS = "fs.defaultFS"; /** * fs s3a endpoint */ public static final String FS_S3A_ENDPOINT = "fs.s3a.endpoint"; /** * fs s3a access key */ public static final String FS_S3A_ACCESS_KEY = "fs.s3a.access.key"; /** * fs s3a secret key */ public static 
final String FS_S3A_SECRET_KEY = "fs.s3a.secret.key"; /** * hadoop configuration */ public static final String HADOOP_RM_STATE_ACTIVE = "ACTIVE"; public static final String HADOOP_RM_STATE_STANDBY = "STANDBY"; public static final String HADOOP_RESOURCE_MANAGER_HTTPADDRESS_PORT = "resource.manager.httpaddress.port"; /** * yarn.resourcemanager.ha.rm.ids */ public static final String YARN_RESOURCEMANAGER_HA_RM_IDS = "yarn.resourcemanager.ha.rm.ids"; /** * yarn.application.status.address */ public static final String YARN_APPLICATION_STATUS_ADDRESS = "yarn.application.status.address"; /** * yarn.job.history.status.address */ public static final String YARN_JOB_HISTORY_STATUS_ADDRESS = "yarn.job.history.status.address"; /** * hdfs configuration * hdfs.root.user */ public static final String HDFS_ROOT_USER = "hdfs.root.user"; /** * hdfs/s3 configuration * resource.upload.path */ public static final String RESOURCE_UPLOAD_PATH = "resource.upload.path"; /** * data basedir path */ public static final String DATA_BASEDIR_PATH = "data.basedir.path"; /** * dolphinscheduler.env.path */ public static final String DOLPHINSCHEDULER_ENV_PATH = "dolphinscheduler.env.path"; /** * environment properties default path */ public static final String ENV_PATH = "env/dolphinscheduler_env.sh"; /** * python home */ public static final String PYTHON_HOME = "PYTHON_HOME"; /** * resource.view.suffixs */ public static final String RESOURCE_VIEW_SUFFIXS = "resource.view.suffixs"; public static final String RESOURCE_VIEW_SUFFIXS_DEFAULT_VALUE = "txt,log,sh,bat,conf,cfg,py,java,sql,xml,hql,properties,json,yml,yaml,ini,js"; /** * development.state */ public static final String DEVELOPMENT_STATE = "development.state"; /** * sudo enable */ public static final String SUDO_ENABLE = "sudo.enable"; /** * string true */ public static final String STRING_TRUE = "true"; /** * string false */ public static final String STRING_FALSE = "false"; /** * resource storage type */ public static final String 
RESOURCE_STORAGE_TYPE = "resource.storage.type"; /** * MasterServer directory registered in zookeeper */ public static final String REGISTRY_DOLPHINSCHEDULER_MASTERS = "/nodes/master"; /** * WorkerServer directory registered in zookeeper */ public static final String REGISTRY_DOLPHINSCHEDULER_WORKERS = "/nodes/worker"; /** * all servers directory registered in zookeeper */ public static final String REGISTRY_DOLPHINSCHEDULER_DEAD_SERVERS = "/dead-servers"; /** * registry node prefix */ public static final String REGISTRY_DOLPHINSCHEDULER_NODE = "/nodes"; /** * MasterServer lock directory registered in zookeeper */ public static final String REGISTRY_DOLPHINSCHEDULER_LOCK_MASTERS = "/lock/masters"; /** * MasterServer failover directory registered in zookeeper */ public static final String REGISTRY_DOLPHINSCHEDULER_LOCK_FAILOVER_MASTERS = "/lock/failover/masters"; /** * WorkerServer failover directory registered in zookeeper */ public static final String REGISTRY_DOLPHINSCHEDULER_LOCK_FAILOVER_WORKERS = "/lock/failover/workers"; /** * MasterServer startup failover runing and fault tolerance process */ public static final String REGISTRY_DOLPHINSCHEDULER_LOCK_FAILOVER_STARTUP_MASTERS = "/lock/failover/startup-masters"; /** * comma , */ public static final String COMMA = ","; /** * slash / */ public static final String SLASH = "/"; /** * COLON : */ public static final String COLON = ":"; /** * SPACE " " */ public static final String SPACE = " "; /** * SINGLE_SLASH / */ public static final String SINGLE_SLASH = "/"; /** * DOUBLE_SLASH // */ public static final String DOUBLE_SLASH = "//"; /** * SINGLE_QUOTES "'" */ public static final String SINGLE_QUOTES = "'"; /** * DOUBLE_QUOTES "\"" */ public static final String DOUBLE_QUOTES = "\""; /** * SEMICOLON ; */ public static final String SEMICOLON = ";"; /** * EQUAL SIGN */ public static final String EQUAL_SIGN = "="; /** * AT SIGN */ public static final String AT_SIGN = "@"; /** * date format of yyyy-MM-dd HH:mm:ss */ 
public static final String YYYY_MM_DD_HH_MM_SS = "yyyy-MM-dd HH:mm:ss"; /** * date format of yyyyMMddHHmmss */ public static final String YYYYMMDDHHMMSS = "yyyyMMddHHmmss"; /** * date format of yyyyMMddHHmmssSSS */ public static final String YYYYMMDDHHMMSSSSS = "yyyyMMddHHmmssSSS"; /** * http connect time out */ public static final int HTTP_CONNECT_TIMEOUT = 60 * 1000; /** * http connect request time out */ public static final int HTTP_CONNECTION_REQUEST_TIMEOUT = 60 * 1000; /** * httpclient soceket time out */ public static final int SOCKET_TIMEOUT = 60 * 1000; /** * http header */ public static final String HTTP_HEADER_UNKNOWN = "unKnown"; /** * http X-Forwarded-For */ public static final String HTTP_X_FORWARDED_FOR = "X-Forwarded-For"; /** * http X-Real-IP */ public static final String HTTP_X_REAL_IP = "X-Real-IP"; /** * UTF-8 */ public static final String UTF_8 = "UTF-8"; /** * user name regex */ public static final Pattern REGEX_USER_NAME = Pattern.compile("^[a-zA-Z0-9._-]{3,39}$"); /** * email regex */ public static final Pattern REGEX_MAIL_NAME = Pattern.compile("^([a-z0-9A-Z]+[_|\\-|\\.]?)+[a-z0-9A-Z]@([a-z0-9A-Z]+(-[a-z0-9A-Z]+)?\\.)+[a-zA-Z]{2,}$"); /** * default display rows */ public static final int DEFAULT_DISPLAY_ROWS = 10; /** * read permission */ public static final int READ_PERMISSION = 2 * 1; /** * write permission */ public static final int WRITE_PERMISSION = 2 * 2; /** * execute permission */ public static final int EXECUTE_PERMISSION = 1; /** * default admin permission */ public static final int DEFAULT_ADMIN_PERMISSION = 7; /** * all permissions */ public static final int ALL_PERMISSIONS = READ_PERMISSION | WRITE_PERMISSION | EXECUTE_PERMISSION; /** * max task timeout */ public static final int MAX_TASK_TIMEOUT = 24 * 3600; /** * master cpu load */ public static final int DEFAULT_MASTER_CPU_LOAD = Runtime.getRuntime().availableProcessors() * 2; /** * worker cpu load */ public static final int DEFAULT_WORKER_CPU_LOAD = 
Runtime.getRuntime().availableProcessors() * 2; /** * worker host weight */ public static final int DEFAULT_WORKER_HOST_WEIGHT = 100; /** * default log cache rows num,output when reach the number */ public static final int DEFAULT_LOG_ROWS_NUM = 4 * 16; /** * log flush interval?output when reach the interval */ public static final int DEFAULT_LOG_FLUSH_INTERVAL = 1000; /** * time unit secong to minutes */ public static final int SEC_2_MINUTES_TIME_UNIT = 60; /*** * * rpc port */ public static final int RPC_PORT = 50051; /*** * alert rpc port */ public static final int ALERT_RPC_PORT = 50052; /** * forbid running task */ public static final String FLOWNODE_RUN_FLAG_FORBIDDEN = "FORBIDDEN"; /** * normal running task */ public static final String FLOWNODE_RUN_FLAG_NORMAL = "NORMAL"; /** * datasource configuration path */ public static final String DATASOURCE_PROPERTIES = "/datasource.properties"; public static final String DEFAULT = "Default"; public static final String USER = "user"; public static final String PASSWORD = "password"; public static final String XXXXXX = "******"; public static final String NULL = "NULL"; public static final String THREAD_NAME_MASTER_SERVER = "Master-Server"; public static final String THREAD_NAME_WORKER_SERVER = "Worker-Server"; /** * command parameter keys */ public static final String CMD_PARAM_RECOVER_PROCESS_ID_STRING = "ProcessInstanceId"; public static final String CMD_PARAM_RECOVERY_START_NODE_STRING = "StartNodeIdList"; public static final String CMD_PARAM_RECOVERY_WAITING_THREAD = "WaitingThreadInstanceId"; public static final String CMD_PARAM_SUB_PROCESS = "processInstanceId"; public static final String CMD_PARAM_EMPTY_SUB_PROCESS = "0"; public static final String CMD_PARAM_SUB_PROCESS_PARENT_INSTANCE_ID = "parentProcessInstanceId"; public static final String CMD_PARAM_SUB_PROCESS_DEFINE_ID = "processDefinitionId"; public static final String CMD_PARAM_START_NODE_NAMES = "StartNodeNameList"; public static final String 
CMD_PARAM_START_PARAMS = "StartParams"; public static final String CMD_PARAM_FATHER_PARAMS = "fatherParams"; /** * complement data start date */ public static final String CMDPARAM_COMPLEMENT_DATA_START_DATE = "complementStartDate"; /** * complement data end date */ public static final String CMDPARAM_COMPLEMENT_DATA_END_DATE = "complementEndDate"; /** * data source config */ public static final String SPRING_DATASOURCE_DRIVER_CLASS_NAME = "spring.datasource.driver-class-name"; public static final String SPRING_DATASOURCE_URL = "spring.datasource.url"; public static final String SPRING_DATASOURCE_USERNAME = "spring.datasource.username"; public static final String SPRING_DATASOURCE_PASSWORD = "spring.datasource.password"; public static final String SPRING_DATASOURCE_VALIDATION_QUERY_TIMEOUT = "spring.datasource.validationQueryTimeout"; public static final String SPRING_DATASOURCE_INITIAL_SIZE = "spring.datasource.initialSize"; public static final String SPRING_DATASOURCE_MIN_IDLE = "spring.datasource.minIdle"; public static final String SPRING_DATASOURCE_MAX_ACTIVE = "spring.datasource.maxActive"; public static final String SPRING_DATASOURCE_MAX_WAIT = "spring.datasource.maxWait"; public static final String SPRING_DATASOURCE_TIME_BETWEEN_EVICTION_RUNS_MILLIS = "spring.datasource.timeBetweenEvictionRunsMillis"; public static final String SPRING_DATASOURCE_TIME_BETWEEN_CONNECT_ERROR_MILLIS = "spring.datasource.timeBetweenConnectErrorMillis"; public static final String SPRING_DATASOURCE_MIN_EVICTABLE_IDLE_TIME_MILLIS = "spring.datasource.minEvictableIdleTimeMillis"; public static final String SPRING_DATASOURCE_VALIDATION_QUERY = "spring.datasource.validationQuery"; public static final String SPRING_DATASOURCE_TEST_WHILE_IDLE = "spring.datasource.testWhileIdle"; public static final String SPRING_DATASOURCE_TEST_ON_BORROW = "spring.datasource.testOnBorrow"; public static final String SPRING_DATASOURCE_TEST_ON_RETURN = "spring.datasource.testOnReturn"; public static final 
String SPRING_DATASOURCE_POOL_PREPARED_STATEMENTS = "spring.datasource.poolPreparedStatements"; public static final String SPRING_DATASOURCE_DEFAULT_AUTO_COMMIT = "spring.datasource.defaultAutoCommit"; public static final String SPRING_DATASOURCE_KEEP_ALIVE = "spring.datasource.keepAlive"; public static final String SPRING_DATASOURCE_MAX_POOL_PREPARED_STATEMENT_PER_CONNECTION_SIZE = "spring.datasource.maxPoolPreparedStatementPerConnectionSize"; public static final String DEVELOPMENT = "development"; public static final String QUARTZ_PROPERTIES_PATH = "quartz.properties"; /** * sleep time */ public static final int SLEEP_TIME_MILLIS = 1000; /** * master task instance cache-database refresh interval */ public static final int CACHE_REFRESH_TIME_MILLIS = 20 * 1000; /** * heartbeat for zk info length */ public static final int HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH = 10; public static final int HEARTBEAT_WITH_WEIGHT_FOR_ZOOKEEPER_INFO_LENGTH = 11; /** * jar */ public static final String JAR = "jar"; /** * hadoop */ public static final String HADOOP = "hadoop"; /** * -D <property>=<value> */ public static final String D = "-D"; /** * -D mapreduce.job.name=name */ public static final String MR_NAME = "mapreduce.job.name"; /** * -D mapreduce.job.queuename=queuename */ public static final String MR_QUEUE = "mapreduce.job.queuename"; /** * spark params constant */ public static final String MASTER = "--master"; public static final String DEPLOY_MODE = "--deploy-mode"; /** * --class CLASS_NAME */ public static final String MAIN_CLASS = "--class"; /** * --driver-cores NUM */ public static final String DRIVER_CORES = "--driver-cores"; /** * --driver-memory MEM */ public static final String DRIVER_MEMORY = "--driver-memory"; /** * --num-executors NUM */ public static final String NUM_EXECUTORS = "--num-executors"; /** * --executor-cores NUM */ public static final String EXECUTOR_CORES = "--executor-cores"; /** * --executor-memory MEM */ public static final String EXECUTOR_MEMORY = 
"--executor-memory"; /** * --name NAME */ public static final String SPARK_NAME = "--name"; /** * --queue QUEUE */ public static final String SPARK_QUEUE = "--queue"; /** * exit code success */ public static final int EXIT_CODE_SUCCESS = 0; /** * exit code kill */ public static final int EXIT_CODE_KILL = 137; /** * exit code failure */ public static final int EXIT_CODE_FAILURE = -1; /** * process or task definition failure */ public static final int DEFINITION_FAILURE = -1; /** * date format of yyyyMMdd */ public static final String PARAMETER_FORMAT_DATE = "yyyyMMdd"; /** * date format of yyyyMMddHHmmss */ public static final String PARAMETER_FORMAT_TIME = "yyyyMMddHHmmss"; /** * system date(yyyyMMddHHmmss) */ public static final String PARAMETER_DATETIME = "system.datetime"; /** * system date(yyyymmdd) today */ public static final String PARAMETER_CURRENT_DATE = "system.biz.curdate"; /** * system date(yyyymmdd) yesterday */ public static final String PARAMETER_BUSINESS_DATE = "system.biz.date"; /** * ACCEPTED */ public static final String ACCEPTED = "ACCEPTED"; /** * SUCCEEDED */ public static final String SUCCEEDED = "SUCCEEDED"; /** * NEW */ public static final String NEW = "NEW"; /** * NEW_SAVING */ public static final String NEW_SAVING = "NEW_SAVING"; /** * SUBMITTED */ public static final String SUBMITTED = "SUBMITTED"; /** * FAILED */ public static final String FAILED = "FAILED"; /** * KILLED */ public static final String KILLED = "KILLED"; /** * RUNNING */ public static final String RUNNING = "RUNNING"; /** * underline "_" */ public static final String UNDERLINE = "_"; /** * quartz job prifix */ public static final String QUARTZ_JOB_PRIFIX = "job"; /** * quartz job group prifix */ public static final String QUARTZ_JOB_GROUP_PRIFIX = "jobgroup"; /** * projectId */ public static final String PROJECT_ID = "projectId"; /** * processId */ public static final String SCHEDULE_ID = "scheduleId"; /** * schedule */ public static final String SCHEDULE = "schedule"; 
/** * application regex */ public static final String APPLICATION_REGEX = "application_\\d+_\\d+"; public static final String PID = OSUtils.isWindows() ? "handle" : "pid"; /** * month_begin */ public static final String MONTH_BEGIN = "month_begin"; /** * add_months */ public static final String ADD_MONTHS = "add_months"; /** * month_end */ public static final String MONTH_END = "month_end"; /** * week_begin */ public static final String WEEK_BEGIN = "week_begin"; /** * week_end */ public static final String WEEK_END = "week_end"; /** * timestamp */ public static final String TIMESTAMP = "timestamp"; public static final char SUBTRACT_CHAR = '-'; public static final char ADD_CHAR = '+'; public static final char MULTIPLY_CHAR = '*'; public static final char DIVISION_CHAR = '/'; public static final char LEFT_BRACE_CHAR = '('; public static final char RIGHT_BRACE_CHAR = ')'; public static final String ADD_STRING = "+"; public static final String MULTIPLY_STRING = "*"; public static final String DIVISION_STRING = "/"; public static final String LEFT_BRACE_STRING = "("; public static final char P = 'P'; public static final char N = 'N'; public static final String SUBTRACT_STRING = "-"; public static final String GLOBAL_PARAMS = "globalParams"; public static final String LOCAL_PARAMS = "localParams"; public static final String LOCAL_PARAMS_LIST = "localParamsList"; public static final String SUBPROCESS_INSTANCE_ID = "subProcessInstanceId"; public static final String PROCESS_INSTANCE_STATE = "processInstanceState"; public static final String PARENT_WORKFLOW_INSTANCE = "parentWorkflowInstance"; public static final String CONDITION_RESULT = "conditionResult"; public static final String DEPENDENCE = "dependence"; public static final String TASK_TYPE = "taskType"; public static final String TASK_LIST = "taskList"; public static final String RWXR_XR_X = "rwxr-xr-x"; public static final String QUEUE = "queue"; public static final String QUEUE_NAME = "queueName"; public static 
final int LOG_QUERY_SKIP_LINE_NUMBER = 0; public static final int LOG_QUERY_LIMIT = 4096; /** * master/worker server use for zk */ public static final String MASTER_TYPE = "master"; public static final String WORKER_TYPE = "worker"; public static final String DELETE_OP = "delete"; public static final String ADD_OP = "add"; public static final String ALIAS = "alias"; public static final String CONTENT = "content"; public static final String DEPENDENT_SPLIT = ":||"; public static final String DEPENDENT_ALL = "ALL"; /** * preview schedule execute count */ public static final int PREVIEW_SCHEDULE_EXECUTE_COUNT = 5; /** * kerberos */ public static final String KERBEROS = "kerberos"; /** * kerberos expire time */ public static final String KERBEROS_EXPIRE_TIME = "kerberos.expire.time"; /** * java.security.krb5.conf */ public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; /** * java.security.krb5.conf.path */ public static final String JAVA_SECURITY_KRB5_CONF_PATH = "java.security.krb5.conf.path"; /** * hadoop.security.authentication */ public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication"; /** * hadoop.security.authentication */ public static final String HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE = "hadoop.security.authentication.startup.state"; /** * com.amazonaws.services.s3.enableV4 */ public static final String AWS_S3_V4 = "com.amazonaws.services.s3.enableV4"; /** * loginUserFromKeytab user */ public static final String LOGIN_USER_KEY_TAB_USERNAME = "login.user.keytab.username"; /** * loginUserFromKeytab path */ public static final String LOGIN_USER_KEY_TAB_PATH = "login.user.keytab.path"; /** * task log info format */ public static final String TASK_LOG_INFO_FORMAT = "TaskLogInfo-%s"; /** * hive conf */ public static final String HIVE_CONF = "hiveconf:"; /** * flink */ public static final String FLINK_YARN_CLUSTER = "yarn-cluster"; public static final String FLINK_RUN_MODE = "-m"; public static 
final String FLINK_YARN_SLOT = "-ys"; public static final String FLINK_APP_NAME = "-ynm"; public static final String FLINK_QUEUE = "-yqu"; public static final String FLINK_TASK_MANAGE = "-yn"; public static final String FLINK_JOB_MANAGE_MEM = "-yjm"; public static final String FLINK_TASK_MANAGE_MEM = "-ytm"; public static final String FLINK_MAIN_CLASS = "-c"; public static final String FLINK_PARALLELISM = "-p"; public static final String FLINK_SHUTDOWN_ON_ATTACHED_EXIT = "-sae"; public static final String FLINK_PYTHON = "-py"; public static final int[] NOT_TERMINATED_STATES = new int[] { ExecutionStatus.SUBMITTED_SUCCESS.ordinal(), ExecutionStatus.RUNNING_EXECUTION.ordinal(), ExecutionStatus.DELAY_EXECUTION.ordinal(), ExecutionStatus.READY_PAUSE.ordinal(), ExecutionStatus.READY_STOP.ordinal(), ExecutionStatus.NEED_FAULT_TOLERANCE.ordinal(), ExecutionStatus.WAITING_THREAD.ordinal(), ExecutionStatus.WAITING_DEPEND.ordinal() }; /** * status */ public static final String STATUS = "status"; /** * message */ public static final String MSG = "msg"; /** * data total */ public static final String COUNT = "count"; /** * page size */ public static final String PAGE_SIZE = "pageSize"; /** * current page no */ public static final String PAGE_NUMBER = "pageNo"; /** * */ public static final String DATA_LIST = "data"; public static final String TOTAL_LIST = "totalList"; public static final String CURRENT_PAGE = "currentPage"; public static final String TOTAL_PAGE = "totalPage"; public static final String TOTAL = "total"; /** * workflow */ public static final String WORKFLOW_LIST = "workFlowList"; public static final String WORKFLOW_RELATION_LIST = "workFlowRelationList"; /** * session user */ public static final String SESSION_USER = "session.user"; public static final String SESSION_ID = "sessionId"; public static final String PASSWORD_DEFAULT = "******"; /** * locale */ public static final String LOCALE_LANGUAGE = "language"; /** * driver */ public static final String 
ORG_POSTGRESQL_DRIVER = "org.postgresql.Driver"; public static final String COM_MYSQL_JDBC_DRIVER = "com.mysql.jdbc.Driver"; public static final String ORG_APACHE_HIVE_JDBC_HIVE_DRIVER = "org.apache.hive.jdbc.HiveDriver"; public static final String COM_CLICKHOUSE_JDBC_DRIVER = "ru.yandex.clickhouse.ClickHouseDriver"; public static final String COM_ORACLE_JDBC_DRIVER = "oracle.jdbc.driver.OracleDriver"; public static final String COM_SQLSERVER_JDBC_DRIVER = "com.microsoft.sqlserver.jdbc.SQLServerDriver"; public static final String COM_DB2_JDBC_DRIVER = "com.ibm.db2.jcc.DB2Driver"; public static final String COM_PRESTO_JDBC_DRIVER = "com.facebook.presto.jdbc.PrestoDriver"; /** * database type */ public static final String MYSQL = "MYSQL"; public static final String POSTGRESQL = "POSTGRESQL"; public static final String HIVE = "HIVE"; public static final String SPARK = "SPARK"; public static final String CLICKHOUSE = "CLICKHOUSE"; public static final String ORACLE = "ORACLE"; public static final String SQLSERVER = "SQLSERVER"; public static final String DB2 = "DB2"; public static final String PRESTO = "PRESTO"; /** * jdbc url */ public static final String JDBC_MYSQL = "jdbc:mysql://"; public static final String JDBC_POSTGRESQL = "jdbc:postgresql://"; public static final String JDBC_HIVE_2 = "jdbc:hive2://"; public static final String JDBC_CLICKHOUSE = "jdbc:clickhouse://"; public static final String JDBC_ORACLE_SID = "jdbc:oracle:thin:@"; public static final String JDBC_ORACLE_SERVICE_NAME = "jdbc:oracle:thin:@//"; public static final String JDBC_SQLSERVER = "jdbc:sqlserver://"; public static final String JDBC_DB2 = "jdbc:db2://"; public static final String JDBC_PRESTO = "jdbc:presto://"; public static final String ADDRESS = "address"; public static final String DATABASE = "database"; public static final String JDBC_URL = "jdbcUrl"; public static final String PRINCIPAL = "principal"; public static final String OTHER = "other"; public static final String 
ORACLE_DB_CONNECT_TYPE = "connectType"; public static final String KERBEROS_KRB5_CONF_PATH = "javaSecurityKrb5Conf"; public static final String KERBEROS_KEY_TAB_USERNAME = "loginUserKeytabUsername"; public static final String KERBEROS_KEY_TAB_PATH = "loginUserKeytabPath"; /** * session timeout */ public static final int SESSION_TIME_OUT = 7200; public static final int MAX_FILE_SIZE = 1024 * 1024 * 1024; public static final String UDF = "UDF"; public static final String CLASS = "class"; public static final String RECEIVERS = "receivers"; public static final String RECEIVERS_CC = "receiversCc"; /** * dataSource sensitive param */ public static final String DATASOURCE_PASSWORD_REGEX = "(?<=((?i)password((\\\\\":\\\\\")|(=')))).*?(?=((\\\\\")|(')))"; /** * default worker group */ public static final String DEFAULT_WORKER_GROUP = "default"; public static final Integer TASK_INFO_LENGTH = 5; /** * new * schedule time */ public static final String PARAMETER_SHECDULE_TIME = "schedule.time"; /** * authorize writable perm */ public static final int AUTHORIZE_WRITABLE_PERM = 7; /** * authorize readable perm */ public static final int AUTHORIZE_READABLE_PERM = 4; /** * plugin configurations */ public static final String PLUGIN_JAR_SUFFIX = ".jar"; public static final int NORMAL_NODE_STATUS = 0; public static final int ABNORMAL_NODE_STATUS = 1; public static final String START_TIME = "start time"; public static final String END_TIME = "end time"; public static final String START_END_DATE = "startDate,endDate"; /** * system line separator */ public static final String SYSTEM_LINE_SEPARATOR = System.getProperty("line.separator"); public static final String EXCEL_SUFFIX_XLS = ".xls"; /** * datasource encryption salt */ public static final String DATASOURCE_ENCRYPTION_SALT_DEFAULT = "!@#$%^&*"; public static final String DATASOURCE_ENCRYPTION_ENABLE = "datasource.encryption.enable"; public static final String DATASOURCE_ENCRYPTION_SALT = "datasource.encryption.salt"; /** * network 
interface preferred */ public static final String DOLPHIN_SCHEDULER_NETWORK_INTERFACE_PREFERRED = "dolphin.scheduler.network.interface.preferred"; /** * network IP gets priority, default inner outer */ public static final String DOLPHIN_SCHEDULER_NETWORK_PRIORITY_STRATEGY = "dolphin.scheduler.network.priority.strategy"; /** * exec shell scripts */ public static final String SH = "sh"; /** * pstree, get pud and sub pid */ public static final String PSTREE = "pstree"; /** * snow flake, data center id, this id must be greater than 0 and less than 32 */ public static final String SNOW_FLAKE_DATA_CENTER_ID = "data.center.id"; /** * docker & kubernetes */ public static final boolean DOCKER_MODE = StringUtils.isNotEmpty(System.getenv("DOCKER")); public static final boolean KUBERNETES_MODE = StringUtils.isNotEmpty(System.getenv("KUBERNETES_SERVICE_HOST")) && StringUtils.isNotEmpty(System.getenv("KUBERNETES_SERVICE_PORT")); }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,773
[Improvement][server] need to support two parameters related to task
**Describe the question** When I'm using the shell task ,I need the instance id of task and the absolute path of task. **What are the current deficiencies and the benefits of improvement** **Which version of DolphinScheduler:** -[dev] **Describe alternatives you've considered**
https://github.com/apache/dolphinscheduler/issues/5773
https://github.com/apache/dolphinscheduler/pull/5774
ab527a5e5abd04243305a50f184d8009b9edf21a
9fd5145b66646f3df847ea3c81bb272621ee86ca
"2021-07-08T10:01:12Z"
java
"2021-07-09T09:00:32Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.common.utils; import static java.nio.charset.StandardCharsets.UTF_8; import static com.fasterxml.jackson.databind.DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT; import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES; import static com.fasterxml.jackson.databind.DeserializationFeature.READ_UNKNOWN_ENUM_VALUES_AS_NULL; import static com.fasterxml.jackson.databind.MapperFeature.REQUIRE_SETTERS_FOR_GETTERS; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.TimeZone; import com.fasterxml.jackson.core.JsonProcessingException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonDeserializer; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.JsonSerializer; import com.fasterxml.jackson.databind.ObjectMapper; import 
com.fasterxml.jackson.databind.ObjectWriter; import com.fasterxml.jackson.databind.SerializationFeature; import com.fasterxml.jackson.databind.SerializerProvider; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.node.TextNode; import com.fasterxml.jackson.databind.type.CollectionType; /** * json utils */ public class JSONUtils { private static final Logger logger = LoggerFactory.getLogger(JSONUtils.class); /** * can use static singleton, inject: just make sure to reuse! */ private static final ObjectMapper objectMapper = new ObjectMapper() .configure(FAIL_ON_UNKNOWN_PROPERTIES, false) .configure(ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT, true) .configure(READ_UNKNOWN_ENUM_VALUES_AS_NULL, true) .configure(REQUIRE_SETTERS_FOR_GETTERS, true) .setTimeZone(TimeZone.getDefault()); private JSONUtils() { throw new UnsupportedOperationException("Construct JSONUtils"); } public static ArrayNode createArrayNode() { return objectMapper.createArrayNode(); } public static ObjectNode createObjectNode() { return objectMapper.createObjectNode(); } public static JsonNode toJsonNode(Object obj) { return objectMapper.valueToTree(obj); } /** * json representation of object * * @param object object * @param feature feature * @return object to json string */ public static String toJsonString(Object object, SerializationFeature feature) { try { ObjectWriter writer = objectMapper.writer(feature); return writer.writeValueAsString(object); } catch (Exception e) { logger.error("object to json exception!", e); } return null; } /** * This method deserializes the specified Json into an object of the specified class. It is not * suitable to use if the specified class is a generic type since it will not have the generic * type information because of the Type Erasure feature of Java. Therefore, this method should not * be used if the desired type is a generic type. 
Note that this method works fine if the any of * the fields of the specified object are generics, just the object itself should not be a * generic type. * * @param json the string from which the object is to be deserialized * @param clazz the class of T * @param <T> T * @return an object of type T from the string * classOfT */ public static <T> T parseObject(String json, Class<T> clazz) { if (StringUtils.isEmpty(json)) { return null; } try { return objectMapper.readValue(json, clazz); } catch (Exception e) { logger.error("parse object exception!", e); } return null; } /** * deserialize * * @param src byte array * @param clazz class * @param <T> deserialize type * @return deserialize type */ public static <T> T parseObject(byte[] src, Class<T> clazz) { if (src == null) { return null; } String json = new String(src, UTF_8); return parseObject(json, clazz); } /** * json to list * * @param json json string * @param clazz class * @param <T> T * @return list */ public static <T> List<T> toList(String json, Class<T> clazz) { if (StringUtils.isEmpty(json)) { return Collections.emptyList(); } try { CollectionType listType = objectMapper.getTypeFactory().constructCollectionType(ArrayList.class, clazz); return objectMapper.readValue(json, listType); } catch (Exception e) { logger.error("parse list exception!", e); } return Collections.emptyList(); } /** * check json object valid * * @param json json * @return true if valid */ public static boolean checkJsonValid(String json) { if (StringUtils.isEmpty(json)) { return false; } try { objectMapper.readTree(json); return true; } catch (IOException e) { logger.error("check json object valid exception!", e); } return false; } /** * Method for finding a JSON Object field with specified name in this * node or its child nodes, and returning value it has. * If no matching field is found in this node or its descendants, returns null. 
* * @param jsonNode json node * @param fieldName Name of field to look for * @return Value of first matching node found, if any; null if none */ public static String findValue(JsonNode jsonNode, String fieldName) { JsonNode node = jsonNode.findValue(fieldName); if (node == null) { return null; } return node.asText(); } /** * json to map * {@link #toMap(String, Class, Class)} * * @param json json * @return json to map */ public static Map<String, String> toMap(String json) { return parseObject(json, new TypeReference<Map<String, String>>() {}); } /** * from the key-value generated json to get the str value no matter the real type of value * @param json the json str * @param nodeName key * @return the str value of key */ public static String getNodeString(String json, String nodeName) { try { JsonNode rootNode = objectMapper.readTree(json); return rootNode.has(nodeName) ? rootNode.get(nodeName).toString() : ""; } catch (JsonProcessingException e) { return ""; } } /** * json to map * * @param json json * @param classK classK * @param classV classV * @param <K> K * @param <V> V * @return to map */ public static <K, V> Map<K, V> toMap(String json, Class<K> classK, Class<V> classV) { return parseObject(json, new TypeReference<Map<K, V>>() {}); } /** * json to object * * @param json json string * @param type type reference * @param <T> * @return return parse object */ public static <T> T parseObject(String json, TypeReference<T> type) { if (StringUtils.isEmpty(json)) { return null; } try { return objectMapper.readValue(json, type); } catch (Exception e) { logger.error("json to map exception!", e); } return null; } /** * object to json string * * @param object object * @return json string */ public static String toJsonString(Object object) { try { return objectMapper.writeValueAsString(object); } catch (Exception e) { throw new RuntimeException("Object json deserialization exception.", e); } } /** * serialize to json byte * * @param obj object * @param <T> object type * 
@return byte array */ public static <T> byte[] toJsonByteArray(T obj) { if (obj == null) { return null; } String json = ""; try { json = toJsonString(obj); } catch (Exception e) { logger.error("json serialize exception.", e); } return json.getBytes(UTF_8); } public static ObjectNode parseObject(String text) { try { if (text.isEmpty()) { return parseObject(text, ObjectNode.class); } else { return (ObjectNode) objectMapper.readTree(text); } } catch (Exception e) { throw new RuntimeException("String json deserialization exception.", e); } } public static ArrayNode parseArray(String text) { try { return (ArrayNode) objectMapper.readTree(text); } catch (Exception e) { throw new RuntimeException("Json deserialization exception.", e); } } /** * json serializer */ public static class JsonDataSerializer extends JsonSerializer<String> { @Override public void serialize(String value, JsonGenerator gen, SerializerProvider provider) throws IOException { gen.writeRawValue(value); } } /** * json data deserializer */ public static class JsonDataDeserializer extends JsonDeserializer<String> { @Override public String deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { JsonNode node = p.getCodec().readTree(p); if (node instanceof TextNode) { return node.asText(); } else { return node.toString(); } } } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,773
[Improvement][server] need to support two parameters related to task
**Describe the question** When I'm using the shell task ,I need the instance id of task and the absolute path of task. **What are the current deficiencies and the benefits of improvement** **Which version of DolphinScheduler:** -[dev] **Describe alternatives you've considered**
https://github.com/apache/dolphinscheduler/issues/5773
https://github.com/apache/dolphinscheduler/pull/5774
ab527a5e5abd04243305a50f184d8009b9edf21a
9fd5145b66646f3df847ea3c81bb272621ee86ca
"2021-07-08T10:01:12Z"
java
"2021-07-09T09:00:32Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/LoggerUtils.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.common.utils; import org.apache.dolphinscheduler.common.Constants; import java.io.BufferedReader; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * logger utils */ public class LoggerUtils { private LoggerUtils() { throw new UnsupportedOperationException("Construct LoggerUtils"); } private static final Logger logger = LoggerFactory.getLogger(LoggerUtils.class); /** * rules for extracting application ID */ private static final Pattern APPLICATION_REGEX = Pattern.compile(Constants.APPLICATION_REGEX); /** * Task Logger's prefix */ public static final String TASK_LOGGER_INFO_PREFIX = "TASK"; /** * Task Logger Thread's name */ public static final String TASK_LOGGER_THREAD_NAME = "TaskLogInfo"; /** * Task Logger Thread's name */ public static final String TASK_APPID_LOG_FORMAT = "[taskAppId="; /** * build job id * * @param affix Task Logger's prefix * @param processInstId process instance id * @param taskId task id * @return 
task id format */ public static String buildTaskId(String affix, Long processDefineCode, int processDefineVersion, int processInstId, int taskId) { // - [taskAppId=TASK-798_1-4084-15210] return String.format(" - %s%s-%s_%s-%s-%s]", TASK_APPID_LOG_FORMAT, affix, processDefineCode, processDefineVersion, processInstId, taskId); } /** * processing log * get yarn application id list * * @param log log content * @param logger logger * @return app id list */ public static List<String> getAppIds(String log, Logger logger) { List<String> appIds = new ArrayList<>(); Matcher matcher = APPLICATION_REGEX.matcher(log); // analyse logs to get all submit yarn application id while (matcher.find()) { String appId = matcher.group(); if (!appIds.contains(appId)) { logger.info("find app id: {}", appId); appIds.add(appId); } } return appIds; } /** * read whole file content * * @param filePath file path * @return whole file content */ public static String readWholeFileContent(String filePath) { String line; StringBuilder sb = new StringBuilder(); try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(filePath)))) { while ((line = br.readLine()) != null) { sb.append(line + "\r\n"); } return sb.toString(); } catch (IOException e) { logger.error("read file error", e); } return ""; } public static void logError(Optional<Logger> optionalLogger , String error) { optionalLogger.ifPresent((Logger logger) -> logger.error(error)); } public static void logError(Optional<Logger> optionalLogger , Throwable e) { optionalLogger.ifPresent((Logger logger) -> logger.error(e.getMessage(), e)); } public static void logError(Optional<Logger> optionalLogger , String error, Throwable e) { optionalLogger.ifPresent((Logger logger) -> logger.error(error, e)); } public static void logInfo(Optional<Logger> optionalLogger , String info) { optionalLogger.ifPresent((Logger logger) -> logger.info(info)); } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,773
[Improvement][server] need to support two parameters related to task
**Describe the question** When I'm using the shell task ,I need the instance id of task and the absolute path of task. **What are the current deficiencies and the benefits of improvement** **Which version of DolphinScheduler:** -[dev] **Describe alternatives you've considered**
https://github.com/apache/dolphinscheduler/issues/5773
https://github.com/apache/dolphinscheduler/pull/5774
ab527a5e5abd04243305a50f184d8009b9edf21a
9fd5145b66646f3df847ea3c81bb272621ee86ca
"2021-07-08T10:01:12Z"
java
"2021-07-09T09:00:32Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ParamUtils.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.utils; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.DataType; import org.apache.dolphinscheduler.common.enums.Direct; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.Map; /** * param utils */ public class ParamUtils { /** * parameter conversion * @param globalParams global params * @param globalParamsMap global params map * @param localParams local params * @param commandType command type * @param scheduleTime schedule time * @return global params */ public static Map<String,Property> convert(Map<String,Property> globalParams, Map<String,String> globalParamsMap, Map<String,Property> localParams, Map<String,Property> varParams, CommandType commandType, Date scheduleTime) { if (globalParams == null && localParams == null) { return null; } // if it is a complement, // you need to pass in the 
task instance id to locate the time // of the process instance complement Map<String,String> timeParams = BusinessTimeUtils .getBusinessTime(commandType, scheduleTime); if (globalParamsMap != null) { timeParams.putAll(globalParamsMap); } if (globalParams != null && localParams != null) { localParams.putAll(globalParams); globalParams = localParams; } else if (globalParams == null && localParams != null) { globalParams = localParams; } if (varParams != null) { varParams.putAll(globalParams); globalParams = varParams; } Iterator<Map.Entry<String, Property>> iter = globalParams.entrySet().iterator(); while (iter.hasNext()) { Map.Entry<String, Property> en = iter.next(); Property property = en.getValue(); if (StringUtils.isNotEmpty(property.getValue()) && property.getValue().startsWith("$")) { /** * local parameter refers to global parameter with the same name * note: the global parameters of the process instance here are solidified parameters, * and there are no variables in them. */ String val = property.getValue(); val = ParameterUtils.convertParameterPlaceholders(val, timeParams); property.setValue(val); } } return globalParams; } /** * format convert * @param paramsMap params map * @return Map of converted */ public static Map<String,String> convert(Map<String,Property> paramsMap) { if (paramsMap == null) { return null; } Map<String,String> map = new HashMap<>(); Iterator<Map.Entry<String, Property>> iter = paramsMap.entrySet().iterator(); while (iter.hasNext()) { Map.Entry<String, Property> en = iter.next(); map.put(en.getKey(),en.getValue().getValue()); } return map; } /** * get parameters map * @param definedParams definedParams * @return parameters map */ public static Map<String,Property> getUserDefParamsMap(Map<String,String> definedParams) { if (definedParams != null) { Map<String,Property> userDefParamsMaps = new HashMap<>(); Iterator<Map.Entry<String, String>> iter = definedParams.entrySet().iterator(); while (iter.hasNext()) { Map.Entry<String, String> 
en = iter.next(); Property property = new Property(en.getKey(), Direct.IN, DataType.VARCHAR, en.getValue()); userDefParamsMaps.put(property.getProp(),property); } return userDefParamsMaps; } return null; } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,773
[Improvement][server] need to support two parameters related to task
**Describe the question** When I'm using the shell task ,I need the instance id of task and the absolute path of task. **What are the current deficiencies and the benefits of improvement** **Which version of DolphinScheduler:** -[dev] **Describe alternatives you've considered**
https://github.com/apache/dolphinscheduler/issues/5773
https://github.com/apache/dolphinscheduler/pull/5774
ab527a5e5abd04243305a50f184d8009b9edf21a
9fd5145b66646f3df847ea3c81bb272621ee86ca
"2021-07-08T10:01:12Z"
java
"2021-07-09T09:00:32Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/ShellCommandExecutor.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.worker.task; import org.apache.commons.io.FileUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.slf4j.Logger; import java.io.File; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Paths; import java.util.List; import java.util.function.Consumer; /** * shell command executor */ public class ShellCommandExecutor extends AbstractCommandExecutor { /** * For Unix-like, using sh */ public static final String SH = "sh"; /** * For Windows, using cmd.exe */ public static final String CMD = "cmd.exe"; /** * constructor * @param logHandler logHandler * @param taskExecutionContext taskExecutionContext * @param logger logger */ public ShellCommandExecutor(Consumer<List<String>> logHandler, TaskExecutionContext taskExecutionContext, Logger logger) { super(logHandler,taskExecutionContext,logger); } public ShellCommandExecutor(List<String> logBuffer) { super(logBuffer); } @Override protected String buildCommandFilePath() { // command file return String.format("%s/%s.%s" , taskExecutionContext.getExecutePath() 
, taskExecutionContext.getTaskAppId() , OSUtils.isWindows() ? "bat" : "command"); } /** * get command type * @return command type */ @Override protected String commandInterpreter() { return OSUtils.isWindows() ? CMD : SH; } /** * create command file if not exists * @param execCommand exec command * @param commandFile command file * @throws IOException io exception */ @Override protected void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException { logger.info("tenantCode user:{}, task dir:{}", taskExecutionContext.getTenantCode(), taskExecutionContext.getTaskAppId()); // create if non existence if (!Files.exists(Paths.get(commandFile))) { logger.info("create command file:{}", commandFile); StringBuilder sb = new StringBuilder(); if (OSUtils.isWindows()) { sb.append("@echo off\n"); sb.append("cd /d %~dp0\n"); if (taskExecutionContext.getEnvFile() != null) { sb.append("call ").append(taskExecutionContext.getEnvFile()).append("\n"); } } else { sb.append("#!/bin/sh\n"); sb.append("BASEDIR=$(cd `dirname $0`; pwd)\n"); sb.append("cd $BASEDIR\n"); if (taskExecutionContext.getEnvFile() != null) { sb.append("source ").append(taskExecutionContext.getEnvFile()).append("\n"); } } sb.append(execCommand); logger.info("command : {}", sb.toString()); // write data to file FileUtils.writeStringToFile(new File(commandFile), sb.toString(), StandardCharsets.UTF_8); } } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,773
[Improvement][server] need to support two parameters related to task
**Describe the question** When I'm using the shell task ,I need the instance id of task and the absolute path of task. **What are the current deficiencies and the benefits of improvement** **Which version of DolphinScheduler:** -[dev] **Describe alternatives you've considered**
https://github.com/apache/dolphinscheduler/issues/5773
https://github.com/apache/dolphinscheduler/pull/5774
ab527a5e5abd04243305a50f184d8009b9edf21a
9fd5145b66646f3df847ea3c81bb272621ee86ca
"2021-07-08T10:01:12Z"
java
"2021-07-09T09:00:32Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTask.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.worker.task.shell; import static java.util.Calendar.DAY_OF_MONTH; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.Direct; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.shell.ShellParameters; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractTask; import org.apache.dolphinscheduler.server.worker.task.CommandExecuteResult; import org.apache.dolphinscheduler.server.worker.task.ShellCommandExecutor; import org.slf4j.Logger; import java.io.File; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; import 
java.nio.file.attribute.FileAttribute; import java.nio.file.attribute.PosixFilePermission; import java.nio.file.attribute.PosixFilePermissions; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; /** * shell task */ public class ShellTask extends AbstractTask { /** * shell parameters */ private ShellParameters shellParameters; /** * shell command executor */ private ShellCommandExecutor shellCommandExecutor; /** * taskExecutionContext */ private TaskExecutionContext taskExecutionContext; /** * constructor * * @param taskExecutionContext taskExecutionContext * @param logger logger */ public ShellTask(TaskExecutionContext taskExecutionContext, Logger logger) { super(taskExecutionContext, logger); this.taskExecutionContext = taskExecutionContext; this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, taskExecutionContext, logger); } @Override public void init() { logger.info("shell task params {}", taskExecutionContext.getTaskParams()); shellParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), ShellParameters.class); if (!shellParameters.checkParameters()) { throw new RuntimeException("shell task params is not valid"); } } @Override public void handle() throws Exception { try { // construct process CommandExecuteResult commandExecuteResult = shellCommandExecutor.run(buildCommand()); setExitStatusCode(commandExecuteResult.getExitStatusCode()); setAppIds(commandExecuteResult.getAppIds()); setProcessId(commandExecuteResult.getProcessId()); shellParameters.dealOutParam(shellCommandExecutor.getVarPool()); } catch (Exception e) { logger.error("shell task error", e); setExitStatusCode(Constants.EXIT_CODE_FAILURE); throw e; } } @Override public void cancelApplication(boolean cancelApplication) throws Exception { // cancel process shellCommandExecutor.cancelApplication(); } /** * create command * * @return file name * @throws Exception exception */ 
private String buildCommand() throws Exception { // generate scripts String fileName = String.format("%s/%s_node.%s", taskExecutionContext.getExecutePath(), taskExecutionContext.getTaskAppId(), OSUtils.isWindows() ? "bat" : "sh"); Path path = new File(fileName).toPath(); if (Files.exists(path)) { return fileName; } String script = shellParameters.getRawScript().replaceAll("\\r\\n", "\n"); script = parseScript(script); shellParameters.setRawScript(script); logger.info("raw script : {}", shellParameters.getRawScript()); logger.info("task execute path : {}", taskExecutionContext.getExecutePath()); Set<PosixFilePermission> perms = PosixFilePermissions.fromString(Constants.RWXR_XR_X); FileAttribute<Set<PosixFilePermission>> attr = PosixFilePermissions.asFileAttribute(perms); if (OSUtils.isWindows()) { Files.createFile(path); } else { Files.createFile(path, attr); } Files.write(path, shellParameters.getRawScript().getBytes(), StandardOpenOption.APPEND); return fileName; } @Override public AbstractParameters getParameters() { return shellParameters; } private String parseScript(String script) { // combining local and global parameters Map<String, Property> paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()), taskExecutionContext.getDefinedParams(), shellParameters.getLocalParametersMap(), shellParameters.getVarPoolMap(), CommandType.of(taskExecutionContext.getCmdTypeIfComplement()), taskExecutionContext.getScheduleTime()); // replace variable TIME with $[YYYYmmddd...] 
in shell file when history run job and batch complement job if (taskExecutionContext.getScheduleTime() != null) { if (paramsMap == null) { paramsMap = new HashMap<>(); } Date date = taskExecutionContext.getScheduleTime(); if (CommandType.COMPLEMENT_DATA.getCode() == taskExecutionContext.getCmdTypeIfComplement()) { date = DateUtils.add(taskExecutionContext.getScheduleTime(), DAY_OF_MONTH, 1); } String dateTime = DateUtils.format(date, Constants.PARAMETER_FORMAT_TIME); Property p = new Property(); p.setValue(dateTime); p.setProp(Constants.PARAMETER_DATETIME); paramsMap.put(Constants.PARAMETER_DATETIME, p); } return ParameterUtils.convertParameterPlaceholders(script, ParamUtils.convert(paramsMap)); } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,773
[Improvement][server] need to support two parameters related to task
**Describe the question** When I'm using the shell task ,I need the instance id of task and the absolute path of task. **What are the current deficiencies and the benefits of improvement** **Which version of DolphinScheduler:** -[dev] **Describe alternatives you've considered**
https://github.com/apache/dolphinscheduler/issues/5773
https://github.com/apache/dolphinscheduler/pull/5774
ab527a5e5abd04243305a50f184d8009b9edf21a
9fd5145b66646f3df847ea3c81bb272621ee86ca
"2021-07-08T10:01:12Z"
java
"2021-07-09T09:00:32Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/ParamUtilsTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.utils; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.DataType; import org.apache.dolphinscheduler.common.enums.Direct; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.utils.JSONUtils; import java.util.Calendar; import java.util.Date; import java.util.HashMap; import java.util.Map; import org.junit.Before; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Test ParamUtils */ public class ParamUtilsTest { private static final Logger logger = LoggerFactory.getLogger(ParamUtilsTest.class); //Define global variables public Map<String, Property> globalParams = new HashMap<>(); public Map<String, String> globalParamsMap = new HashMap<>(); public Map<String, Property> localParams = new HashMap<>(); public Map<String, Property> varPoolParams = new HashMap<>(); /** * Init params * * @throws Exception */ @Before public void setUp() throws Exception { Property property = new Property(); property.setProp("global_param"); 
property.setDirect(Direct.IN); property.setType(DataType.VARCHAR); property.setValue("${system.biz.date}"); globalParams.put("global_param", property); globalParamsMap.put("global_param", "${system.biz.date}"); Property localProperty = new Property(); localProperty.setProp("local_param"); localProperty.setDirect(Direct.IN); localProperty.setType(DataType.VARCHAR); localProperty.setValue("${global_param}"); localParams.put("local_param", localProperty); Property varProperty = new Property(); varProperty.setProp("local_param"); varProperty.setDirect(Direct.IN); varProperty.setType(DataType.VARCHAR); varProperty.setValue("${global_param}"); varPoolParams.put("varPool", varProperty); } /** * Test convert */ @Test public void testConvert() { //The expected value String expected = "{\"varPool\":{\"prop\":\"local_param\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"20191229\"}," + "\"global_param\":{\"prop\":\"global_param\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"20191229\"}," + "\"local_param\":{\"prop\":\"local_param\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"20191229\"}}"; //The expected value when globalParams is null but localParams is not null String expected1 = "{\"varPool\":{\"prop\":\"local_param\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"20191229\"}," + "\"global_param\":{\"prop\":\"global_param\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"20191229\"}," + "\"local_param\":{\"prop\":\"local_param\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"20191229\"}}"; //Define expected date , the month is 0-base Calendar calendar = Calendar.getInstance(); calendar.set(2019, 11, 30); Date date = calendar.getTime(); //Invoke convert Map<String, Property> paramsMap = ParamUtils.convert(globalParams, globalParamsMap, localParams, varPoolParams,CommandType.START_PROCESS, date); String result = JSONUtils.toJsonString(paramsMap); assertEquals(expected, result); for (Map.Entry<String, Property> entry : paramsMap.entrySet()) { 
String key = entry.getKey(); Property prop = entry.getValue(); logger.info(key + " : " + prop.getValue()); } //Invoke convert with null globalParams Map<String, Property> paramsMap1 = ParamUtils.convert(null, globalParamsMap, localParams,varPoolParams, CommandType.START_PROCESS, date); String result1 = JSONUtils.toJsonString(paramsMap1); assertEquals(expected1, result1); //Null check, invoke convert with null globalParams and null localParams Map<String, Property> paramsMap2 = ParamUtils.convert(null, globalParamsMap, null, varPoolParams,CommandType.START_PROCESS, date); assertNull(paramsMap2); } /** * Test the overload method of convert */ @Test public void testConvert1() { //The expected value String expected = "{\"global_param\":\"${system.biz.date}\"}"; //Invoke convert Map<String, String> paramsMap = ParamUtils.convert(globalParams); String result = JSONUtils.toJsonString(paramsMap); assertEquals(expected, result); logger.info(result); //Null check Map<String, String> paramsMap1 = ParamUtils.convert(null); assertNull(paramsMap1); } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,773
[Improvement][server] need to support two parameters related to task
**Describe the question** When I'm using the shell task ,I need the instance id of task and the absolute path of task. **What are the current deficiencies and the benefits of improvement** **Which version of DolphinScheduler:** -[dev] **Describe alternatives you've considered**
https://github.com/apache/dolphinscheduler/issues/5773
https://github.com/apache/dolphinscheduler/pull/5774
ab527a5e5abd04243305a50f184d8009b9edf21a
9fd5145b66646f3df847ea3c81bb272621ee86ca
"2021-07-08T10:01:12Z"
java
"2021-07-09T09:00:32Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/shell/ShellCommandExecutorTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.worker.shell; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.lang.reflect.Method; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.worker.task.AbstractCommandExecutor; import org.apache.dolphinscheduler.server.worker.task.AbstractTask; import org.apache.dolphinscheduler.server.worker.task.TaskProps; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; import org.junit.Assert; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; import 
org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.ApplicationContext; import java.util.Date; import java.util.List; /** * python shell command executor test */ @RunWith(PowerMockRunner.class) @PrepareForTest(OSUtils.class) @PowerMockIgnore({"javax.management.*"}) public class ShellCommandExecutorTest { private static final Logger logger = LoggerFactory.getLogger(ShellCommandExecutorTest.class); private ProcessService processService = null; private ApplicationContext applicationContext; @Before public void before() { applicationContext = PowerMockito.mock(ApplicationContext.class); processService = PowerMockito.mock(ProcessService.class); SpringApplicationContext springApplicationContext = new SpringApplicationContext(); springApplicationContext.setApplicationContext(applicationContext); PowerMockito.when(applicationContext.getBean(ProcessService.class)).thenReturn(processService); } @Ignore @Test public void test() throws Exception { TaskProps taskProps = new TaskProps(); // processDefineId_processInstanceId_taskInstanceId taskProps.setExecutePath("/opt/soft/program/tmp/dolphinscheduler/exec/flow/5/36/2864/7657"); taskProps.setTaskAppId("36_2864_7657"); // set tenant -> task execute linux user taskProps.setTenantCode("hdfs"); taskProps.setTaskStartTime(new Date()); taskProps.setTaskTimeout(360000); taskProps.setTaskInstanceId(7657); TaskInstance taskInstance = processService.findTaskInstanceById(7657); // TaskNode taskNode = JSON.parseObject(taskJson, TaskNode.class); // taskProps.setTaskParams(taskNode.getParams()); // custom logger // Logger taskLogger = LoggerFactory.getLogger(LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX, // taskInstance.getProcessDefinitionId(), // taskInstance.getProcessInstanceId(), // taskInstance.getId())); // AbstractTask task = TaskManager.newTask(taskInstance.getTaskType(), taskProps, taskLogger); AbstractTask task = null; logger.info("task info : {}", task); // job init task.init(); // 
job handle task.handle(); ExecutionStatus status = ExecutionStatus.SUCCESS; if (task.getExitStatusCode() == Constants.EXIT_CODE_SUCCESS) { status = ExecutionStatus.SUCCESS; } else if (task.getExitStatusCode() == Constants.EXIT_CODE_KILL) { status = ExecutionStatus.KILL; } else { status = ExecutionStatus.FAILURE; } logger.info(status.toString()); } @Test public void testParseProcessOutput() { Class<AbstractCommandExecutor> shellCommandExecutorClass = AbstractCommandExecutor.class; try { Method method = shellCommandExecutorClass.getDeclaredMethod("parseProcessOutput", Process.class); method.setAccessible(true); Object[] arg1s = {new Process() { @Override public OutputStream getOutputStream() { return new OutputStream() { @Override public void write(int b) throws IOException { logger.info("unit test"); } }; } @Override public InputStream getInputStream() { return new InputStream() { @Override public int read() throws IOException { return 0; } }; } @Override public InputStream getErrorStream() { return null; } @Override public int waitFor() throws InterruptedException { return 0; } @Override public int exitValue() { return 0; } @Override public void destroy() { logger.info("unit test"); } } }; method.invoke(new AbstractCommandExecutor(null, new TaskExecutionContext(), logger) { @Override protected String buildCommandFilePath() { return null; } @Override protected String commandInterpreter() { return null; } @Override protected void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException { logger.info("unit test"); } }, arg1s); } catch (Exception e) { logger.error(e.getMessage()); } } @Test public void testFindAppId() { Class<AbstractCommandExecutor> shellCommandExecutorClass = AbstractCommandExecutor.class; try { Method method = shellCommandExecutorClass.getDeclaredMethod("findAppId", new Class[]{String.class}); method.setAccessible(true); Object[] arg1s = {"11111"}; String result = (String) method.invoke(new 
AbstractCommandExecutor(null, null, null) { @Override protected String buildCommandFilePath() { return null; } @Override protected String commandInterpreter() { return null; } @Override protected void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException { logger.info("unit test"); } }, arg1s); } catch (Exception e) { logger.error(e.getMessage()); } } @Test public void testConvertFile2List() { Class<AbstractCommandExecutor> shellCommandExecutorClass = AbstractCommandExecutor.class; try { Method method = shellCommandExecutorClass.getDeclaredMethod("convertFile2List", String.class); method.setAccessible(true); Object[] arg1s = {"/opt/1.txt"}; List<String> result = (List<String>) method.invoke(new AbstractCommandExecutor(null, null, null) { @Override protected String buildCommandFilePath() { return null; } @Override protected String commandInterpreter() { return null; } @Override protected void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException { logger.info("unit test"); } }, arg1s); Assert.assertTrue(true); } catch (Exception e) { logger.error(e.getMessage()); } } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,786
[Improvement][Server] When the Worker turns down, the MasterServer cannot handle the Remove event correctly and throws NPE
**Describe the question** When the Worker turns down, the MasterServer cannot handle the Remove event correctly and throws an NPE exception: ```java [ERROR] 2021-07-10 00:27:52.736 org.apache.curator.framework.recipes.cache.TreeCache:[779] - java.lang.NullPointerException: null at org.apache.dolphinscheduler.server.master.registry.MasterRegistryDataListener.handleWorkerEvent(MasterRegistryDataListener.java:83) at org.apache.dolphinscheduler.server.master.registry.MasterRegistryDataListener.notify(MasterRegistryDataListener.java:47) at org.apache.dolphinscheduler.spi.register.ListenerManager.dataChange(ListenerManager.java:63) at org.apache.dolphinscheduler.plugin.registry.zookeeper.ZookeeperRegistry.lambda$subscribe$0(ZookeeperRegistry.java:166) at org.apache.curator.framework.recipes.cache.TreeCache$2.apply(TreeCache.java:760) at org.apache.curator.framework.recipes.cache.TreeCache$2.apply(TreeCache.java:754) at org.apache.curator.framework.listen.ListenerContainer$1.run(ListenerContainer.java:100) at org.apache.curator.shaded.com.google.common.util.concurrent.DirectExecutor.execute(DirectExecutor.java:30) at org.apache.curator.framework.listen.ListenerContainer.forEach(ListenerContainer.java:92) at org.apache.curator.framework.recipes.cache.TreeCache.callListeners(TreeCache.java:753) at org.apache.curator.framework.recipes.cache.TreeCache.access$1900(TreeCache.java:75) at org.apache.curator.framework.recipes.cache.TreeCache$4.run(TreeCache.java:865) at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) at java.util.concurrent.FutureTask.run$$$capture(FutureTask.java:266) at java.util.concurrent.FutureTask.run(FutureTask.java) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) ``` **What are the current deficiencies and the benefits of improvement** ```java public class 
MasterRegistryDataListener implements SubscribeListener { private static final Logger logger = LoggerFactory.getLogger(MasterRegistryDataListener.class); @Resource private MasterRegistryClient masterRegistryClient; ``` The `MasterRegistryDataListener` class will be used by `MasterRegistryClient`, as shown below: ```java // line 123 registryClient.subscribe(REGISTRY_DOLPHINSCHEDULER_NODE, new MasterRegistryDataListener()); ``` **The @Resource annotation will not take effect in the current case, it would be assigned to null** **Which version of DolphinScheduler:** latest dev branch
https://github.com/apache/dolphinscheduler/issues/5786
https://github.com/apache/dolphinscheduler/pull/5787
9fd5145b66646f3df847ea3c81bb272621ee86ca
626c47399af4b7e8a839165d9d36fdbe04cc54cd
"2021-07-09T16:44:12Z"
java
"2021-07-09T17:08:16Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryDataListener.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.master.registry; import static org.apache.dolphinscheduler.common.Constants.REGISTRY_DOLPHINSCHEDULER_MASTERS; import static org.apache.dolphinscheduler.common.Constants.REGISTRY_DOLPHINSCHEDULER_WORKERS; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.NodeType; import org.apache.dolphinscheduler.spi.register.DataChangeEvent; import org.apache.dolphinscheduler.spi.register.SubscribeListener; import javax.annotation.Resource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class MasterRegistryDataListener implements SubscribeListener { private static final Logger logger = LoggerFactory.getLogger(MasterRegistryDataListener.class); @Resource MasterRegistryClient masterRegistryClient; @Override public void notify(String path, DataChangeEvent event) { //monitor master if (path.startsWith(REGISTRY_DOLPHINSCHEDULER_MASTERS + Constants.SINGLE_SLASH)) { handleMasterEvent(event, path); } else if (path.startsWith(REGISTRY_DOLPHINSCHEDULER_WORKERS + Constants.SINGLE_SLASH)) { //monitor worker handleWorkerEvent(event, path); } } /** * monitor master * * @param event event * @param path path */ public 
void handleMasterEvent(DataChangeEvent event, String path) { switch (event) { case ADD: logger.info("master node added : {}", path); break; case REMOVE: masterRegistryClient.removeNodePath(path, NodeType.MASTER, true); break; default: break; } } /** * monitor worker * * @param event event * @param path path */ public void handleWorkerEvent(DataChangeEvent event, String path) { switch (event) { case ADD: logger.info("worker node added : {}", path); break; case REMOVE: logger.info("worker node deleted : {}", path); masterRegistryClient.removeNodePath(path, NodeType.WORKER, true); break; default: break; } } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,775
[Improvement][Worker] Task log may be lost
**Describe the question** All the code in this issue is at `AbstractCommandExecutor`. If you have seen the task logging code in DolphinScheduler, you probably know that we currently use the production-consumption model for logging. We use one thread to write the log to the collection. https://github.com/apache/dolphinscheduler/blob/b114d330ac1fa7de27e09cc73c0804a7536f3b28/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java#L351-L371 And we use another thread to consumer the log from collection and write to log file. https://github.com/apache/dolphinscheduler/blob/b114d330ac1fa7de27e09cc73c0804a7536f3b28/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java#L375-L390 https://github.com/apache/dolphinscheduler/blob/b114d330ac1fa7de27e09cc73c0804a7536f3b28/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java#L549-L562 The problem is that when executing between line 558 and line 560, the logs added to the collection will be lost.(`logHandler.accept` will take some time). **Which version of DolphinScheduler:** -[1.3.6-release] -[dev] **Describe alternatives you've considered** There are two ways to solve the issue: 1. We can use a block queue to store the log instead of using `Collections.synchronizedList`. 2. We can direct use a read write lock to in `AbstractCommandExecutor` to solve the concurrent problem, instead of using `Collections.synchronizedList`
https://github.com/apache/dolphinscheduler/issues/5775
https://github.com/apache/dolphinscheduler/pull/5783
626c47399af4b7e8a839165d9d36fdbe04cc54cd
30af55b82ae560a5300930af062ad3a88d542e3d
"2021-07-08T13:49:44Z"
java
"2021-07-09T17:14:59Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.worker.task; import static org.apache.dolphinscheduler.common.Constants.EXIT_CODE_FAILURE; import static org.apache.dolphinscheduler.common.Constants.EXIT_CODE_KILL; import static org.apache.dolphinscheduler.common.Constants.EXIT_CODE_SUCCESS; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.thread.Stopper; import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.dolphinscheduler.common.utils.HadoopUtils; import org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.ProcessUtils; import org.apache.dolphinscheduler.server.worker.cache.TaskExecutionContextCacheManager; import org.apache.dolphinscheduler.server.worker.cache.impl.TaskExecutionContextCacheManagerImpl; import 
org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.lang.reflect.Field; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.slf4j.Logger; /** * abstract command executor */ public abstract class AbstractCommandExecutor { /** * rules for extracting application ID */ protected static final Pattern APPLICATION_REGEX = Pattern.compile(Constants.APPLICATION_REGEX); protected StringBuilder varPool = new StringBuilder(); /** * process */ private Process process; /** * log handler */ protected Consumer<List<String>> logHandler; /** * logger */ protected Logger logger; /** * log list */ protected final List<String> logBuffer; protected boolean logOutputIsScuccess = false; /** * taskExecutionContext */ protected TaskExecutionContext taskExecutionContext; /** * taskExecutionContextCacheManager */ private TaskExecutionContextCacheManager taskExecutionContextCacheManager; public AbstractCommandExecutor(Consumer<List<String>> logHandler, TaskExecutionContext taskExecutionContext, Logger logger) { this.logHandler = logHandler; this.taskExecutionContext = taskExecutionContext; this.logger = logger; this.logBuffer = Collections.synchronizedList(new ArrayList<>()); this.taskExecutionContextCacheManager = SpringApplicationContext.getBean(TaskExecutionContextCacheManagerImpl.class); } protected AbstractCommandExecutor(List<String> logBuffer) { this.logBuffer = logBuffer; } /** * build process * * @param commandFile command file * @throws IOException IO Exception */ private void buildProcess(String commandFile) throws 
IOException { // setting up user to run commands List<String> command = new LinkedList<>(); //init process builder ProcessBuilder processBuilder = new ProcessBuilder(); // setting up a working directory processBuilder.directory(new File(taskExecutionContext.getExecutePath())); // merge error information to standard output stream processBuilder.redirectErrorStream(true); // setting up user to run commands if (!OSUtils.isWindows() && CommonUtils.isSudoEnable()) { command.add("sudo"); command.add("-u"); command.add(taskExecutionContext.getTenantCode()); } command.add(commandInterpreter()); command.addAll(commandOptions()); command.add(commandFile); // setting commands processBuilder.command(command); process = processBuilder.start(); // print command printCommand(command); } /** * task specific execution logic * * @param execCommand execCommand * @return CommandExecuteResult * @throws Exception if error throws Exception */ public CommandExecuteResult run(String execCommand) throws Exception { CommandExecuteResult result = new CommandExecuteResult(); int taskInstanceId = taskExecutionContext.getTaskInstanceId(); // If the task has been killed, then the task in the cache is null if (null == taskExecutionContextCacheManager.getByTaskInstanceId(taskInstanceId)) { result.setExitStatusCode(EXIT_CODE_KILL); return result; } if (StringUtils.isEmpty(execCommand)) { taskExecutionContextCacheManager.removeByTaskInstanceId(taskInstanceId); return result; } String commandFilePath = buildCommandFilePath(); // create command file if not exists createCommandFileIfNotExists(execCommand, commandFilePath); //build process buildProcess(commandFilePath); // parse process output parseProcessOutput(process); Integer processId = getProcessId(process); result.setProcessId(processId); // cache processId taskExecutionContext.setProcessId(processId); boolean updateTaskExecutionContextStatus = taskExecutionContextCacheManager.updateTaskExecutionContext(taskExecutionContext); if 
(Boolean.FALSE.equals(updateTaskExecutionContextStatus)) { ProcessUtils.kill(taskExecutionContext); result.setExitStatusCode(EXIT_CODE_KILL); return result; } // print process id logger.info("process start, process id is: {}", processId); // if timeout occurs, exit directly long remainTime = getRemaintime(); // waiting for the run to finish boolean status = process.waitFor(remainTime, TimeUnit.SECONDS); // if SHELL task exit if (status) { // set appIds List<String> appIds = getAppIds(taskExecutionContext.getLogPath()); result.setAppIds(String.join(Constants.COMMA, appIds)); // SHELL task state result.setExitStatusCode(process.exitValue()); // if yarn task , yarn state is final state if (process.exitValue() == 0) { result.setExitStatusCode(isSuccessOfYarnState(appIds) ? EXIT_CODE_SUCCESS : EXIT_CODE_FAILURE); } } else { logger.error("process has failure , exitStatusCode:{}, processExitValue:{}, ready to kill ...", result.getExitStatusCode(), process.exitValue()); ProcessUtils.kill(taskExecutionContext); result.setExitStatusCode(EXIT_CODE_FAILURE); } logger.info("process has exited, execute path:{}, processId:{} ,exitStatusCode:{} ,processWaitForStatus:{} ,processExitValue:{}", taskExecutionContext.getExecutePath(), processId, result.getExitStatusCode(), status, process.exitValue()); return result; } public String getVarPool() { return varPool.toString(); } /** * cancel application * * @throws Exception exception */ public void cancelApplication() throws Exception { if (process == null) { return; } // clear log clear(); int processId = getProcessId(process); logger.info("cancel process: {}", processId); // kill , waiting for completion boolean killed = softKill(processId); if (!killed) { // hard kill hardKill(processId); // destory process.destroy(); process = null; } } /** * soft kill * * @param processId process id * @return process is alive * @throws InterruptedException interrupted exception */ private boolean softKill(int processId) { if (processId != 0 && 
process.isAlive()) { try { // sudo -u user command to run command String cmd = String.format("kill %d", processId); cmd = OSUtils.getSudoCmd(taskExecutionContext.getTenantCode(), cmd); logger.info("soft kill task:{}, process id:{}, cmd:{}", taskExecutionContext.getTaskAppId(), processId, cmd); Runtime.getRuntime().exec(cmd); } catch (IOException e) { logger.info("kill attempt failed", e); } } return !process.isAlive(); } /** * hard kill * * @param processId process id */ private void hardKill(int processId) { if (processId != 0 && process.isAlive()) { try { String cmd = String.format("kill -9 %d", processId); cmd = OSUtils.getSudoCmd(taskExecutionContext.getTenantCode(), cmd); logger.info("hard kill task:{}, process id:{}, cmd:{}", taskExecutionContext.getTaskAppId(), processId, cmd); Runtime.getRuntime().exec(cmd); } catch (IOException e) { logger.error("kill attempt failed ", e); } } } /** * print command * * @param commands process builder */ private void printCommand(List<String> commands) { String cmdStr; try { cmdStr = ProcessUtils.buildCommandStr(commands); logger.info("task run command:\n{}", cmdStr); } catch (Exception e) { logger.error(e.getMessage(), e); } } /** * clear */ private void clear() { List<String> markerList = new ArrayList<>(); markerList.add(ch.qos.logback.classic.ClassicConstants.FINALIZE_SESSION_MARKER.toString()); if (!logBuffer.isEmpty()) { // log handle logHandler.accept(logBuffer); logBuffer.clear(); } logHandler.accept(markerList); } /** * get the standard output of the process * * @param process process */ private void parseProcessOutput(Process process) { String threadLoggerInfoName = String.format(LoggerUtils.TASK_LOGGER_THREAD_NAME + "-%s", taskExecutionContext.getTaskAppId()); ExecutorService getOutputLogService = ThreadUtils.newDaemonSingleThreadExecutor(threadLoggerInfoName + "-" + "getOutputLogService"); getOutputLogService.submit(() -> { BufferedReader inReader = null; try { inReader = new BufferedReader(new 
InputStreamReader(process.getInputStream())); String line; logBuffer.add("welcome to use bigdata scheduling system..."); while ((line = inReader.readLine()) != null) { if (line.startsWith("${setValue(")) { varPool.append(line.substring("${setValue(".length(), line.length() - 2)); varPool.append("$VarPool$"); } else { logBuffer.add(line); } } } catch (Exception e) { logger.error(e.getMessage(), e); } finally { logOutputIsScuccess = true; close(inReader); } }); getOutputLogService.shutdown(); ExecutorService parseProcessOutputExecutorService = ThreadUtils.newDaemonSingleThreadExecutor(threadLoggerInfoName); parseProcessOutputExecutorService.submit(() -> { try { long lastFlushTime = System.currentTimeMillis(); while (logBuffer.size() > 0 || !logOutputIsScuccess) { if (logBuffer.size() > 0) { lastFlushTime = flush(lastFlushTime); } else { Thread.sleep(Constants.DEFAULT_LOG_FLUSH_INTERVAL); } } } catch (Exception e) { logger.error(e.getMessage(), e); } finally { clear(); } }); parseProcessOutputExecutorService.shutdown(); } /** * check yarn state * * @param appIds application id list * @return is success of yarn task state */ public boolean isSuccessOfYarnState(List<String> appIds) { boolean result = true; try { for (String appId : appIds) { logger.info("check yarn application status, appId:{}", appId); while (Stopper.isRunning()) { ExecutionStatus applicationStatus = HadoopUtils.getInstance().getApplicationStatus(appId); if (logger.isDebugEnabled()) { logger.debug("check yarn application status, appId:{}, final state:{}", appId, applicationStatus.name()); } if (applicationStatus.equals(ExecutionStatus.FAILURE) || applicationStatus.equals(ExecutionStatus.KILL)) { return false; } if (applicationStatus.equals(ExecutionStatus.SUCCESS)) { break; } ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); } } } catch (Exception e) { logger.error("yarn applications: {} , query status failed, exception:{}", StringUtils.join(appIds, ","), e); result = false; } return result; } public int 
getProcessId() { return getProcessId(process); } /** * get app links * * @param logPath log path * @return app id list */ private List<String> getAppIds(String logPath) { List<String> logs = convertFile2List(logPath); List<String> appIds = new ArrayList<>(); /** * analysis log?get submited yarn application id */ for (String log : logs) { String appId = findAppId(log); if (StringUtils.isNotEmpty(appId) && !appIds.contains(appId)) { logger.info("find app id: {}", appId); appIds.add(appId); } } return appIds; } /** * convert file to list * * @param filename file name * @return line list */ private List<String> convertFile2List(String filename) { List lineList = new ArrayList<String>(100); File file = new File(filename); if (!file.exists()) { return lineList; } BufferedReader br = null; try { br = new BufferedReader(new InputStreamReader(new FileInputStream(filename), StandardCharsets.UTF_8)); String line = null; while ((line = br.readLine()) != null) { lineList.add(line); } } catch (Exception e) { logger.error(String.format("read file: %s failed : ", filename), e); } finally { if (br != null) { try { br.close(); } catch (IOException e) { logger.error(e.getMessage(), e); } } } return lineList; } /** * find app id * * @param line line * @return appid */ private String findAppId(String line) { Matcher matcher = APPLICATION_REGEX.matcher(line); if (matcher.find()) { return matcher.group(); } return null; } /** * get remain time(s) * * @return remain time */ private long getRemaintime() { long usedTime = (System.currentTimeMillis() - taskExecutionContext.getStartTime().getTime()) / 1000; long remainTime = taskExecutionContext.getTaskTimeout() - usedTime; if (remainTime < 0) { throw new RuntimeException("task execution time out"); } return remainTime; } /** * get process id * * @param process process * @return process id */ private int getProcessId(Process process) { int processId = 0; try { Field f = process.getClass().getDeclaredField(Constants.PID); 
f.setAccessible(true); processId = f.getInt(process); } catch (Throwable e) { logger.error(e.getMessage(), e); } return processId; } /** * when log buffer siz or flush time reach condition , then flush * * @param lastFlushTime last flush time * @return last flush time */ private long flush(long lastFlushTime) { long now = System.currentTimeMillis(); /** * when log buffer siz or flush time reach condition , then flush */ if (logBuffer.size() >= Constants.DEFAULT_LOG_ROWS_NUM || now - lastFlushTime > Constants.DEFAULT_LOG_FLUSH_INTERVAL) { lastFlushTime = now; /** log handle */ logHandler.accept(logBuffer); logBuffer.clear(); } return lastFlushTime; } /** * close buffer reader * * @param inReader in reader */ private void close(BufferedReader inReader) { if (inReader != null) { try { inReader.close(); } catch (IOException e) { logger.error(e.getMessage(), e); } } } protected List<String> commandOptions() { return Collections.emptyList(); } protected abstract String buildCommandFilePath(); protected abstract String commandInterpreter(); protected abstract void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException; }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,775
[Improvement][Worker] Task log may be lost
**Describe the question** All the code in this issue is at `AbstractCommandExecutor`. If you have seen the task logging code in DolphinScheduler, you probably know that we currently use the production-consumption model for logging. We use one thread to write the log to the collection. https://github.com/apache/dolphinscheduler/blob/b114d330ac1fa7de27e09cc73c0804a7536f3b28/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java#L351-L371 And we use another thread to consumer the log from collection and write to log file. https://github.com/apache/dolphinscheduler/blob/b114d330ac1fa7de27e09cc73c0804a7536f3b28/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java#L375-L390 https://github.com/apache/dolphinscheduler/blob/b114d330ac1fa7de27e09cc73c0804a7536f3b28/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java#L549-L562 The problem is that when executing between line 558 and line 560, the logs added to the collection will be lost.(`logHandler.accept` will take some time). **Which version of DolphinScheduler:** -[1.3.6-release] -[dev] **Describe alternatives you've considered** There are two ways to solve the issue: 1. We can use a block queue to store the log instead of using `Collections.synchronizedList`. 2. We can direct use a read write lock to in `AbstractCommandExecutor` to solve the concurrent problem, instead of using `Collections.synchronizedList`
https://github.com/apache/dolphinscheduler/issues/5775
https://github.com/apache/dolphinscheduler/pull/5783
626c47399af4b7e8a839165d9d36fdbe04cc54cd
30af55b82ae560a5300930af062ad3a88d542e3d
"2021-07-08T13:49:44Z"
java
"2021-07-09T17:14:59Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractTask.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.worker.task; import static ch.qos.logback.classic.ClassicConstants.FINALIZE_SESSION_MARKER; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import java.util.List; import java.util.StringJoiner; import org.slf4j.Logger; /** * executive task */ public abstract class AbstractTask { /** * taskExecutionContext **/ TaskExecutionContext taskExecutionContext; /** * log record */ protected Logger logger; /** * SHELL process pid */ protected int processId; /** * other resource manager appId , for example : YARN etc */ protected String appIds; /** * cancel */ protected volatile boolean cancel = false; /** * exit code */ protected volatile int exitStatusCode = -1; /** * constructor * * @param taskExecutionContext taskExecutionContext * @param logger logger */ protected AbstractTask(TaskExecutionContext taskExecutionContext, Logger logger) { this.taskExecutionContext = taskExecutionContext; this.logger = 
logger; } /** * init task * * @throws Exception exception */ public void init() throws Exception { } /** * task handle * * @throws Exception exception */ public abstract void handle() throws Exception; /** * result processing * * @throws Exception exception */ public void after() throws Exception { } /** * cancel application * * @param status status * @throws Exception exception */ public void cancelApplication(boolean status) throws Exception { this.cancel = status; } /** * log handle * * @param logs log list */ public void logHandle(List<String> logs) { // note that the "new line" is added here to facilitate log parsing if (logs.contains(FINALIZE_SESSION_MARKER.toString())) { logger.info(FINALIZE_SESSION_MARKER, FINALIZE_SESSION_MARKER.toString()); } else { // note: if the logs is a SynchronizedList and will be modified concurrently, // we should must use foreach to iterate the element, otherwise will throw a ConcurrentModifiedException(#issue 5528) StringJoiner joiner = new StringJoiner("\n\t"); logs.forEach(joiner::add); logger.info(" -> {}", joiner); } } /** * get exit status code * * @return exit status code */ public int getExitStatusCode() { return exitStatusCode; } public void setExitStatusCode(int exitStatusCode) { this.exitStatusCode = exitStatusCode; } public String getAppIds() { return appIds; } public void setAppIds(String appIds) { this.appIds = appIds; } public int getProcessId() { return processId; } public void setProcessId(int processId) { this.processId = processId; } /** * get task parameters * * @return AbstractParameters */ public abstract AbstractParameters getParameters(); private boolean typeIsNormalTask(String taskType) { return !(TaskType.SUB_PROCESS.getDesc().equalsIgnoreCase(taskType) || TaskType.DEPENDENT.getDesc().equalsIgnoreCase(taskType)); } /** * get exit status according to exitCode * * @return exit status */ public ExecutionStatus getExitStatus() { ExecutionStatus status; switch (getExitStatusCode()) { case 
Constants.EXIT_CODE_SUCCESS: status = ExecutionStatus.SUCCESS; break; case Constants.EXIT_CODE_KILL: status = ExecutionStatus.KILL; break; default: status = ExecutionStatus.FAILURE; break; } return status; } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,775
[Improvement][Worker] Task log may be lost
**Describe the question** All the code in this issue is at `AbstractCommandExecutor`. If you have seen the task logging code in DolphinScheduler, you probably know that we currently use the production-consumption model for logging. We use one thread to write the log to the collection. https://github.com/apache/dolphinscheduler/blob/b114d330ac1fa7de27e09cc73c0804a7536f3b28/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java#L351-L371 And we use another thread to consumer the log from collection and write to log file. https://github.com/apache/dolphinscheduler/blob/b114d330ac1fa7de27e09cc73c0804a7536f3b28/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java#L375-L390 https://github.com/apache/dolphinscheduler/blob/b114d330ac1fa7de27e09cc73c0804a7536f3b28/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java#L549-L562 The problem is that when executing between line 558 and line 560, the logs added to the collection will be lost.(`logHandler.accept` will take some time). **Which version of DolphinScheduler:** -[1.3.6-release] -[dev] **Describe alternatives you've considered** There are two ways to solve the issue: 1. We can use a block queue to store the log instead of using `Collections.synchronizedList`. 2. We can direct use a read write lock to in `AbstractCommandExecutor` to solve the concurrent problem, instead of using `Collections.synchronizedList`
https://github.com/apache/dolphinscheduler/issues/5775
https://github.com/apache/dolphinscheduler/pull/5783
626c47399af4b7e8a839165d9d36fdbe04cc54cd
30af55b82ae560a5300930af062ad3a88d542e3d
"2021-07-08T13:49:44Z"
java
"2021-07-09T17:14:59Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/PythonCommandExecutor.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.worker.task; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.FileUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Paths; import java.util.Collections; import java.util.List; import java.util.function.Consumer; import java.util.regex.Pattern; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * python command executor */ public class PythonCommandExecutor extends AbstractCommandExecutor { /** * logger */ private static final Logger logger = LoggerFactory.getLogger(PythonCommandExecutor.class); /** * python */ public static final String PYTHON = "python"; private static final Pattern PYTHON_PATH_PATTERN = Pattern.compile("/bin/python[\\d.]*$"); /** * constructor * @param logHandler log handler * @param taskExecutionContext taskExecutionContext * @param logger logger */ public 
PythonCommandExecutor(Consumer<List<String>> logHandler, TaskExecutionContext taskExecutionContext, Logger logger) { super(logHandler,taskExecutionContext,logger); } /** * build command file path * * @return command file path */ @Override protected String buildCommandFilePath() { return String.format("%s/py_%s.command", taskExecutionContext.getExecutePath(), taskExecutionContext.getTaskAppId()); } /** * create command file if not exists * @param execCommand exec command * @param commandFile command file * @throws IOException io exception */ @Override protected void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException { logger.info("tenantCode :{}, task dir:{}", taskExecutionContext.getTenantCode(), taskExecutionContext.getExecutePath()); if (!Files.exists(Paths.get(commandFile))) { logger.info("generate command file:{}", commandFile); StringBuilder sb = new StringBuilder(); sb.append("#-*- encoding=utf8 -*-\n"); sb.append("\n\n"); sb.append(execCommand); logger.info(sb.toString()); // write data to file FileUtils.writeStringToFile(new File(commandFile), sb.toString(), StandardCharsets.UTF_8); } } /** * get command options * @return command options list */ @Override protected List<String> commandOptions() { // unbuffered binary stdout and stderr return Collections.singletonList("-u"); } /** * Gets the command path to which Python can execute * @return python command path */ @Override protected String commandInterpreter() { String pythonHome = getPythonHome(taskExecutionContext.getEnvFile()); return getPythonCommand(pythonHome); } /** * get python command * * @param pythonHome python home * @return python command */ public static String getPythonCommand(String pythonHome) { if (StringUtils.isEmpty(pythonHome)) { return PYTHON; } File file = new File(pythonHome); if (file.exists() && file.isFile()) { return pythonHome; } if (PYTHON_PATH_PATTERN.matcher(pythonHome).find()) { return pythonHome; } return Paths.get(pythonHome, 
"/bin/python").toString(); } /** * get python home * * @param envPath env path * @return python home */ public static String getPythonHome(String envPath) { BufferedReader br = null; StringBuilder sb = new StringBuilder(); try { br = new BufferedReader(new InputStreamReader(new FileInputStream(envPath))); String line; while ((line = br.readLine()) != null) { if (line.contains(Constants.PYTHON_HOME)) { sb.append(line); break; } } String result = sb.toString(); if (StringUtils.isEmpty(result)) { return null; } String[] arrs = result.split(Constants.EQUAL_SIGN); if (arrs.length == 2) { return arrs[1]; } } catch (IOException e) { logger.error("read file failure", e); } finally { try { if (br != null) { br.close(); } } catch (IOException e) { logger.error(e.getMessage(), e); } } return null; } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,775
[Improvement][Worker] Task log may be lost
**Describe the question** All the code in this issue is at `AbstractCommandExecutor`. If you have seen the task logging code in DolphinScheduler, you probably know that we currently use the production-consumption model for logging. We use one thread to write the log to the collection. https://github.com/apache/dolphinscheduler/blob/b114d330ac1fa7de27e09cc73c0804a7536f3b28/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java#L351-L371 And we use another thread to consumer the log from collection and write to log file. https://github.com/apache/dolphinscheduler/blob/b114d330ac1fa7de27e09cc73c0804a7536f3b28/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java#L375-L390 https://github.com/apache/dolphinscheduler/blob/b114d330ac1fa7de27e09cc73c0804a7536f3b28/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java#L549-L562 The problem is that when executing between line 558 and line 560, the logs added to the collection will be lost.(`logHandler.accept` will take some time). **Which version of DolphinScheduler:** -[1.3.6-release] -[dev] **Describe alternatives you've considered** There are two ways to solve the issue: 1. We can use a block queue to store the log instead of using `Collections.synchronizedList`. 2. We can direct use a read write lock to in `AbstractCommandExecutor` to solve the concurrent problem, instead of using `Collections.synchronizedList`
https://github.com/apache/dolphinscheduler/issues/5775
https://github.com/apache/dolphinscheduler/pull/5783
626c47399af4b7e8a839165d9d36fdbe04cc54cd
30af55b82ae560a5300930af062ad3a88d542e3d
"2021-07-08T13:49:44Z"
java
"2021-07-09T17:14:59Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/ShellCommandExecutor.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.worker.task; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.commons.io.FileUtils; import java.io.File; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Paths; import java.util.List; import java.util.function.Consumer; import org.slf4j.Logger; /** * shell command executor */ public class ShellCommandExecutor extends AbstractCommandExecutor { /** * For Unix-like, using sh */ public static final String SH = "sh"; /** * For Windows, using cmd.exe */ public static final String CMD = "cmd.exe"; /** * constructor * @param logHandler logHandler * @param taskExecutionContext taskExecutionContext * @param logger logger */ public ShellCommandExecutor(Consumer<List<String>> logHandler, TaskExecutionContext taskExecutionContext, Logger logger) { super(logHandler,taskExecutionContext,logger); } public ShellCommandExecutor(List<String> logBuffer) { super(logBuffer); } @Override protected String buildCommandFilePath() { // command file return String.format("%s/%s.%s" , taskExecutionContext.getExecutePath() 
, taskExecutionContext.getTaskAppId() , OSUtils.isWindows() ? "bat" : "command"); } /** * get command type * @return command type */ @Override protected String commandInterpreter() { return OSUtils.isWindows() ? CMD : SH; } /** * create command file if not exists * @param execCommand exec command * @param commandFile command file * @throws IOException io exception */ @Override protected void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException { logger.info("tenantCode user:{}, task dir:{}", taskExecutionContext.getTenantCode(), taskExecutionContext.getTaskAppId()); // create if non existence if (!Files.exists(Paths.get(commandFile))) { logger.info("create command file:{}", commandFile); StringBuilder sb = new StringBuilder(); if (OSUtils.isWindows()) { sb.append("@echo off\n"); sb.append("cd /d %~dp0\n"); if (taskExecutionContext.getEnvFile() != null) { sb.append("call ").append(taskExecutionContext.getEnvFile()).append("\n"); } } else { sb.append("#!/bin/sh\n"); sb.append("BASEDIR=$(cd `dirname $0`; pwd)\n"); sb.append("cd $BASEDIR\n"); if (taskExecutionContext.getEnvFile() != null) { sb.append("source ").append(taskExecutionContext.getEnvFile()).append("\n"); } } sb.append(execCommand); logger.info("command : {}", sb.toString()); // write data to file FileUtils.writeStringToFile(new File(commandFile), sb.toString(), StandardCharsets.UTF_8); } } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,775
[Improvement][Worker] Task log may be lost
**Describe the question** All the code in this issue is at `AbstractCommandExecutor`. If you have seen the task logging code in DolphinScheduler, you probably know that we currently use the production-consumption model for logging. We use one thread to write the log to the collection. https://github.com/apache/dolphinscheduler/blob/b114d330ac1fa7de27e09cc73c0804a7536f3b28/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java#L351-L371 And we use another thread to consumer the log from collection and write to log file. https://github.com/apache/dolphinscheduler/blob/b114d330ac1fa7de27e09cc73c0804a7536f3b28/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java#L375-L390 https://github.com/apache/dolphinscheduler/blob/b114d330ac1fa7de27e09cc73c0804a7536f3b28/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java#L549-L562 The problem is that when executing between line 558 and line 560, the logs added to the collection will be lost.(`logHandler.accept` will take some time). **Which version of DolphinScheduler:** -[1.3.6-release] -[dev] **Describe alternatives you've considered** There are two ways to solve the issue: 1. We can use a block queue to store the log instead of using `Collections.synchronizedList`. 2. We can direct use a read write lock to in `AbstractCommandExecutor` to solve the concurrent problem, instead of using `Collections.synchronizedList`
https://github.com/apache/dolphinscheduler/issues/5775
https://github.com/apache/dolphinscheduler/pull/5783
626c47399af4b7e8a839165d9d36fdbe04cc54cd
30af55b82ae560a5300930af062ad3a88d542e3d
"2021-07-08T13:49:44Z"
java
"2021-07-09T17:14:59Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.worker.task.sqoop; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.server.entity.SqoopTaskExecutionContext; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.SqoopJobGenerator; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.List; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.ApplicationContext; /** * sqoop task test */ @RunWith(MockitoJUnitRunner.Silent.class) public class SqoopTaskTest { private static final Logger logger = LoggerFactory.getLogger(SqoopTaskTest.class); private SqoopTask sqoopTask; @Before public void before() { ProcessService 
processService = Mockito.mock(ProcessService.class); ApplicationContext applicationContext = Mockito.mock(ApplicationContext.class); SpringApplicationContext springApplicationContext = new SpringApplicationContext(); springApplicationContext.setApplicationContext(applicationContext); Mockito.when(applicationContext.getBean(ProcessService.class)).thenReturn(processService); TaskExecutionContext taskExecutionContext = new TaskExecutionContext(); taskExecutionContext.setTaskAppId(String.valueOf(System.currentTimeMillis())); taskExecutionContext.setTenantCode("1"); taskExecutionContext.setEnvFile(".dolphinscheduler_env.sh"); taskExecutionContext.setStartTime(new Date()); taskExecutionContext.setTaskTimeout(0); taskExecutionContext.setTaskParams("{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1," + "\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\"," + "\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\"," + "\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[]," + "\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\"" + ",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true," + "\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\"," + "\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}"); sqoopTask = new SqoopTask(taskExecutionContext, logger); //test sqoop task init method sqoopTask.init(); } /** * test SqoopJobGenerator */ @Test public void testGenerator() { TaskExecutionContext mysqlTaskExecutionContext = getMysqlTaskExecutionContext(); //sqoop TEMPLATE job //import mysql to HDFS with hadoop String 
mysqlToHdfs = "{\"jobName\":\"sqoop_import\",\"hadoopCustomParams\":[{\"prop\":\"mapreduce.map.memory.mb\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"4096\"}]," + "\"sqoopAdvancedParams\":[{\"prop\":\"--direct\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"\"}],\"jobType\":\"TEMPLATE\",\"concurrency\":1," + "\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HDFS\"," + "\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"0\\\",\\\"srcQuerySql\\\":\\\"\\\",\\\"srcColumnType\\\":\\\"0\\\"," + "\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[]}\",\"targetParams\":\"{\\\"targetPath\\\":\\\"/ods/tmp/test/person7\\\"," + "\\\"deleteTargetDir\\\":true,\\\"fileType\\\":\\\"--as-textfile\\\",\\\"compressionCodec\\\":\\\"\\\",\\\"fieldsTerminated\\\":\\\"@\\\"," + "\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}"; SqoopParameters mysqlToHdfsParams = JSONUtils.parseObject(mysqlToHdfs, SqoopParameters.class); SqoopJobGenerator generator = new SqoopJobGenerator(); String mysqlToHdfsScript = generator.generateSqoopJob(mysqlToHdfsParams, mysqlTaskExecutionContext); String mysqlToHdfsExpected = "sqoop import -D mapred.job.name=sqoop_import -D mapreduce.map.memory.mb=4096 --direct -m 1 --connect " + "\"jdbc:mysql://192.168.0.111:3306/test?allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false\" " + "--username kylo --password \"123456\" --table person_2 --target-dir /ods/tmp/test/person7 --as-textfile " + "--delete-target-dir --fields-terminated-by '@' --lines-terminated-by '\\n' --null-non-string 'NULL' --null-string 'NULL'"; Assert.assertEquals(mysqlToHdfsExpected, mysqlToHdfsScript); //export hdfs to mysql using update mode String hdfsToMysql = "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HDFS\"," + 
"\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"exportDir\\\":\\\"/ods/tmp/test/person7\\\"}\"," + "\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"id,name,age,sex,create_time\\\"," + "\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":true,\\\"targetUpdateKey\\\":\\\"id\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\"," + "\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}"; SqoopParameters hdfsToMysqlParams = JSONUtils.parseObject(hdfsToMysql, SqoopParameters.class); String hdfsToMysqlScript = generator.generateSqoopJob(hdfsToMysqlParams, mysqlTaskExecutionContext); String hdfsToMysqlScriptExpected = "sqoop export -D mapred.job.name=sqoop_import -m 1 --export-dir /ods/tmp/test/person7 --connect " + "\"jdbc:mysql://192.168.0.111:3306/test?allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false\" " + "--username kylo --password \"123456\" --table person_3 --columns id,name,age,sex,create_time --fields-terminated-by '@' " + "--lines-terminated-by '\\n' --update-key id --update-mode allowinsert"; Assert.assertEquals(hdfsToMysqlScriptExpected, hdfsToMysqlScript); //export hive to mysql String hiveToMysql = "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HIVE\"," + "\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal\\\"," + "\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-17\\\"}\"," + "\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"\\\",\\\"preQuery\\\":\\\"\\\"," + "\\\"isUpdate\\\":false,\\\"targetUpdateKey\\\":\\\"\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\"," + "\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}"; SqoopParameters 
hiveToMysqlParams = JSONUtils.parseObject(hiveToMysql, SqoopParameters.class); String hiveToMysqlScript = generator.generateSqoopJob(hiveToMysqlParams, mysqlTaskExecutionContext); String hiveToMysqlExpected = "sqoop export -D mapred.job.name=sqoop_import -m 1 --hcatalog-database stg --hcatalog-table person_internal --hcatalog-partition-keys date " + "--hcatalog-partition-values 2020-02-17 --connect \"jdbc:mysql://192.168.0.111:3306/test?allowLoadLocalInfile=" + "false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false\" --username kylo --password \"123456\" --table person_3 " + "--fields-terminated-by '@' --lines-terminated-by '\\n'"; Assert.assertEquals(hiveToMysqlExpected, hiveToMysqlScript); //import mysql to hive String mysqlToHive = "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\"," + "\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"1\\\"," + "\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[]," + "\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\"," + "\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true,\\\"dropDelimiter\\\":false," + "\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}"; SqoopParameters mysqlToHiveParams = JSONUtils.parseObject(mysqlToHive, SqoopParameters.class); String mysqlToHiveScript = generator.generateSqoopJob(mysqlToHiveParams, mysqlTaskExecutionContext); String mysqlToHiveExpected = "sqoop import -D mapred.job.name=sqoop_import -m 1 --connect \"jdbc:mysql://192.168.0.111:3306/" 
+ "test?allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false\" " + "--username kylo --password \"123456\" " + "--query \"SELECT * FROM person_2 WHERE \\$CONDITIONS\" --map-column-java id=Integer --hive-import --hive-database stg --hive-table person_internal_2 " + "--create-hive-table --hive-overwrite --delete-target-dir --hive-partition-key date --hive-partition-value 2020-02-16"; Assert.assertEquals(mysqlToHiveExpected, mysqlToHiveScript); //sqoop CUSTOM job String sqoopCustomString = "{\"jobType\":\"CUSTOM\",\"localParams\":[],\"customShell\":\"sqoop import\"}"; SqoopParameters sqoopCustomParams = JSONUtils.parseObject(sqoopCustomString, SqoopParameters.class); String sqoopCustomScript = generator.generateSqoopJob(sqoopCustomParams, new TaskExecutionContext()); String sqoopCustomExpected = "sqoop import"; Assert.assertEquals(sqoopCustomExpected, sqoopCustomScript); } /** * get taskExecutionContext include mysql * * @return TaskExecutionContext */ private TaskExecutionContext getMysqlTaskExecutionContext() { TaskExecutionContext taskExecutionContext = new TaskExecutionContext(); SqoopTaskExecutionContext sqoopTaskExecutionContext = new SqoopTaskExecutionContext(); String mysqlSourceConnectionParams = "{\"address\":\"jdbc:mysql://192.168.0.111:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://192.168.0.111:3306/test\",\"user\":\"kylo\",\"password\":\"123456\"}"; String mysqlTargetConnectionParams = "{\"address\":\"jdbc:mysql://192.168.0.111:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://192.168.0.111:3306/test\",\"user\":\"kylo\",\"password\":\"123456\"}"; sqoopTaskExecutionContext.setDataSourceId(2); sqoopTaskExecutionContext.setDataTargetId(2); sqoopTaskExecutionContext.setSourcetype(0); sqoopTaskExecutionContext.setTargetConnectionParams(mysqlTargetConnectionParams); sqoopTaskExecutionContext.setSourceConnectionParams(mysqlSourceConnectionParams); sqoopTaskExecutionContext.setTargetType(0); 
taskExecutionContext.setSqoopTaskExecutionContext(sqoopTaskExecutionContext); return taskExecutionContext; } @Test public void testGetParameters() { Assert.assertNotNull(sqoopTask.getParameters()); } /** * Method: init */ @Test public void testInit() { try { sqoopTask.init(); } catch (Exception e) { Assert.fail(e.getMessage()); } } @Test public void testLogHandler() throws InterruptedException { List<String> list = Collections.synchronizedList(new ArrayList<>()); Thread thread1 = new Thread(() -> { for (int i = 0; i < 10; i++) { list.add("test add log"); } }); Thread thread2 = new Thread(() -> { for (int i = 0; i < 10; i++) { sqoopTask.logHandle(list); } }); thread1.start(); thread2.start(); thread1.join(); thread2.join(); // if no exception throw, assert true Assert.assertTrue(true); } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,725
[Bug][CheckStyle] Import checkstyle config file in idea's checkstyle plugin, get a CheckStyleException
Import checkstyle config file(https://github.com/apache/dolphinscheduler/blob/dev/style/checkstyle.xml), IDEA throw a CheckStyleException. The information is as follows: **Which version of Dolphin Scheduler:** -[dev] **Idea info:** ![image](https://user-images.githubusercontent.com/4127712/123929326-50a77780-d9c1-11eb-9252-a9ca268bde74.png) **Exception info:** ![微信截图_20210630141057](https://user-images.githubusercontent.com/4127712/123929490-716fcd00-d9c1-11eb-9d97-3de6228be1f3.png)
https://github.com/apache/dolphinscheduler/issues/5725
https://github.com/apache/dolphinscheduler/pull/5789
30af55b82ae560a5300930af062ad3a88d542e3d
16986c3c651af38469c6d4cb03a587fd174c9a9b
"2021-06-30T08:41:21Z"
java
"2021-07-10T16:31:45Z"
.github/workflows/ci_ut.yml
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # on: pull_request: push: branches: - dev env: LOG_DIR: /tmp/dolphinscheduler name: Unit Test jobs: build: name: Build runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 with: submodule: true - name: Check License Header uses: apache/skywalking-eyes@ec88b7d850018c8983f87729ea88549e100c5c82 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Only enable review / suggestion here - uses: actions/cache@v1 with: path: ~/.m2/repository key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} restore-keys: | ${{ runner.os }}-maven- - name: Bootstrap database run: | sed -i "/image: bitnami\/postgresql/a\ ports:\n - 5432:5432" $(pwd)/docker/docker-swarm/docker-compose.yml sed -i "/image: bitnami\/zookeeper/a\ ports:\n - 2181:2181" $(pwd)/docker/docker-swarm/docker-compose.yml docker-compose -f $(pwd)/docker/docker-swarm/docker-compose.yml up -d dolphinscheduler-zookeeper dolphinscheduler-postgresql until docker logs docker-swarm_dolphinscheduler-postgresql_1 2>&1 | grep 'listening on IPv4 address'; do echo "waiting for postgresql ready ..."; sleep 1; done docker run --rm --network docker-swarm_dolphinscheduler -v 
$(pwd)/sql/dolphinscheduler_postgre.sql:/docker-entrypoint-initdb.d/dolphinscheduler_postgre.sql bitnami/postgresql:latest bash -c "PGPASSWORD=root psql -h docker-swarm_dolphinscheduler-postgresql_1 -U root -d dolphinscheduler -v ON_ERROR_STOP=1 -f /docker-entrypoint-initdb.d/dolphinscheduler_postgre.sql" - name: Set up JDK 1.8 uses: actions/setup-java@v1 with: java-version: 1.8 - name: Git fetch unshallow run: | git fetch --unshallow git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*" git fetch origin - name: Compile run: | export MAVEN_OPTS='-Dmaven.repo.local=.m2/repository -XX:+TieredCompilation -XX:TieredStopAtLevel=1 -XX:+CMSClassUnloadingEnabled -XX:+UseConcMarkSweepGC -XX:-UseGCOverheadLimit -Xmx5g' mvn test -B -Dmaven.test.skip=false - name: Upload coverage report to codecov run: | CODECOV_TOKEN="09c2663f-b091-4258-8a47-c981827eb29a" bash <(curl -s https://codecov.io/bash) # Set up JDK 11 for SonarCloud. - name: Set up JDK 1.11 uses: actions/setup-java@v1 with: java-version: 1.11 - name: Run SonarCloud Analysis run: > mvn --batch-mode verify sonar:sonar -Dsonar.coverage.jacoco.xmlReportPaths=target/site/jacoco/jacoco.xml -Dmaven.test.skip=true -Dsonar.host.url=https://sonarcloud.io -Dsonar.organization=apache -Dsonar.core.codeCoveragePlugin=jacoco -Dsonar.projectKey=apache-dolphinscheduler -Dsonar.login=e4058004bc6be89decf558ac819aa1ecbee57682 -Dsonar.exclusions=dolphinscheduler-ui/src/**/i18n/locale/*.js,dolphinscheduler-microbench/src/**/* env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} - name: Collect logs run: | mkdir -p ${LOG_DIR} docker-compose -f $(pwd)/docker/docker-swarm/docker-compose.yml logs dolphinscheduler-postgresql > ${LOG_DIR}/db.txt continue-on-error: true Checkstyle: name: Check code style runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v2 with: submodule: true - name: check code style env: WORKDIR: ./ REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }} 
CHECKSTYLE_CONFIG: style/checkstyle.xml REVIEWDOG_VERSION: v0.10.2 run: | wget -O - -q https://github.com/checkstyle/checkstyle/releases/download/checkstyle-8.22/checkstyle-8.22-all.jar > /opt/checkstyle.jar wget -O - -q https://raw.githubusercontent.com/reviewdog/reviewdog/master/install.sh | sh -s -- -b /opt ${REVIEWDOG_VERSION} java -jar /opt/checkstyle.jar "${WORKDIR}" -c "${CHECKSTYLE_CONFIG}" -f xml \ | /opt/reviewdog -f=checkstyle \ -reporter="${INPUT_REPORTER:-github-pr-check}" \ -filter-mode="${INPUT_FILTER_MODE:-added}" \ -fail-on-error="${INPUT_FAIL_ON_ERROR:-false}"
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,725
[Bug][CheckStyle] Import checkstyle config file in idea's checkstyle plugin, get a CheckStyleException
Import checkstyle config file(https://github.com/apache/dolphinscheduler/blob/dev/style/checkstyle.xml), IDEA throw a CheckStyleException. The information is as follows: **Which version of Dolphin Scheduler:** -[dev] **Idea info:** ![image](https://user-images.githubusercontent.com/4127712/123929326-50a77780-d9c1-11eb-9252-a9ca268bde74.png) **Exception info:** ![微信截图_20210630141057](https://user-images.githubusercontent.com/4127712/123929490-716fcd00-d9c1-11eb-9d97-3de6228be1f3.png)
https://github.com/apache/dolphinscheduler/issues/5725
https://github.com/apache/dolphinscheduler/pull/5789
30af55b82ae560a5300930af062ad3a88d542e3d
16986c3c651af38469c6d4cb03a587fd174c9a9b
"2021-06-30T08:41:21Z"
java
"2021-07-10T16:31:45Z"
style/checkstyle.xml
<?xml version="1.0"?> <!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> <!DOCTYPE module PUBLIC "-//Puppy Crawl//DTD Check Configuration 1.3//EN" "http://checkstyle.sourceforge.net/dtds/configuration_1_3.dtd"> <module name="Checker"> <property name="charset" value="UTF-8"/> <property name="severity" value="info"/> <property name="fileExtensions" value="java, properties, xml"/> <module name="FileTabCharacter"> <property name="eachLine" value="true"/> </module> <module name="SuppressionFilter"> <property name="file" value="${checkstyle.suppressions.file}" default="checkstyle-suppressions.xml"/> <property name="optional" value="true"/> </module> <module name="TreeWalker"> <module name="OuterTypeFilename"> <property name="severity" value="error"/> </module> <module name="OneTopLevelClass"> <property name="severity" value="error"/> </module> <module name="NoLineWrap"> <property name="severity" value="error"/> </module> <module name="NeedBraces"> <property name="severity" value="error"/> </module> <module name="LeftCurly"> <property name="severity" value="error"/> </module> <module name="AvoidNestedBlocks"> <property name="allowInSwitchCase" value="true"/> </module> <module name="IllegalTokenText"> <property name="tokens" value="STRING_LITERAL, CHAR_LITERAL"/> <property 
name="format" value="\\u00(09|0(a|A)|0(c|C)|0(d|D)|22|27|5(C|c))|\\(0(10|11|12|14|15|42|47)|134)"/> <property name="message" value="Consider using special escape sequence instead of octal value or Unicode escaped value."/> </module> <module name="AvoidEscapedUnicodeCharacters"> <property name="allowEscapesForControlCharacters" value="true"/> <property name="allowByTailComment" value="true"/> <property name="allowNonPrintableEscapes" value="true"/> </module> <module name="LineLength"> <property name="max" value="200"/> <property name="ignorePattern" value="^ *\* *[^ ]+$"/> </module> <module name="EmptyBlock"> <property name="option" value="TEXT"/> <property name="tokens" value="LITERAL_TRY, LITERAL_FINALLY, LITERAL_IF, LITERAL_ELSE, LITERAL_SWITCH"/> </module> <module name="WhitespaceAround"> <property name="allowEmptyConstructors" value="true"/> <property name="allowEmptyMethods" value="true"/> <property name="allowEmptyTypes" value="true"/> <property name="allowEmptyLoops" value="true"/> <message key="ws.notFollowed" value="WhitespaceAround: ''{0}'' is not followed by whitespace. 
Empty blocks may only be represented as '{}' when not part of a multi-block statement (4.1.3)"/> <message key="ws.notPreceded" value="WhitespaceAround: ''{0}'' is not preceded with whitespace."/> </module> <module name="OneStatementPerLine"/> <module name="MultipleVariableDeclarations"/> <module name="ArrayTypeStyle"/> <module name="MissingSwitchDefault"/> <module name="FallThrough"/> <module name="UpperEll"/> <module name="ModifierOrder"/> <module name="EmptyLineSeparator"> <property name="allowMultipleEmptyLines" value="false"/> <property name="allowMultipleEmptyLinesInsideClassMembers" value="false"/> <property name="tokens" value="PACKAGE_DEF, IMPORT, STATIC_IMPORT, CLASS_DEF, INTERFACE_DEF, ENUM_DEF, STATIC_INIT, INSTANCE_INIT, METHOD_DEF, CTOR_DEF"/> </module> <module name="PackageName"> <property name="format" value="^[a-z]+(\.[a-z][a-z0-9]*)*$"/> <message key="name.invalidPattern" value="Package name ''{0}'' must match pattern ''{1}''."/> </module> <module name="TypeName"> <message key="name.invalidPattern" value="Type name ''{0}'' must match pattern ''{1}''."/> </module> <module name="MemberName"> <property name="format" value="^[a-z][a-zA-Z0-9]*$"/> <message key="name.invalidPattern" value="Member name ''{0}'' must match pattern ''{1}''."/> </module> <module name="ParameterName"> <property name="format" value="^[a-z]([a-zA-Z0-9]*)?$"/> <message key="name.invalidPattern" value="Parameter name ''{0}'' must match pattern ''{1}''."/> </module> <module name="CatchParameterName"> <property name="format" value="^[a-z]([a-zA-Z0-9]*)?$"/> <message key="name.invalidPattern" value="Catch parameter name ''{0}'' must match pattern ''{1}''."/> </module> <module name="LocalVariableName"> <property name="tokens" value="VARIABLE_DEF"/> <property name="format" value="^[a-z]([a-zA-Z0-9]*)?$"/> <message key="name.invalidPattern" value="Local variable name ''{0}'' must match pattern ''{1}''."/> </module> <module name="ClassTypeParameterName"> <property name="format" 
value="(^[A-Z][0-9]?)$|([A-Z][a-zA-Z0-9]*[T]$)"/> <message key="name.invalidPattern" value="Class type name ''{0}'' must match pattern ''{1}''."/> </module> <module name="MethodTypeParameterName"> <property name="format" value="(^[A-Z][0-9]?)$|([A-Z][a-zA-Z0-9]*[T]$)"/> <message key="name.invalidPattern" value="Method type name ''{0}'' must match pattern ''{1}''."/> </module> <module name="InterfaceTypeParameterName"> <property name="format" value="(^[A-Z][0-9]?)$|([A-Z][a-zA-Z0-9]*[T]$)"/> <message key="name.invalidPattern" value="Interface type name ''{0}'' must match pattern ''{1}''."/> </module> <module name="NoFinalizer"/> <module name="GenericWhitespace"> <message key="ws.followed" value="GenericWhitespace ''{0}'' is followed by whitespace."/> <message key="ws.preceded" value="GenericWhitespace ''{0}'' is preceded with whitespace."/> <message key="ws.illegalFollow" value="GenericWhitespace ''{0}'' should followed by whitespace."/> <message key="ws.notPreceded" value="GenericWhitespace ''{0}'' is not preceded with whitespace."/> </module> <module name="Indentation"> <property name="basicOffset" value="4"/> <property name="braceAdjustment" value="0"/> <property name="caseIndent" value="4"/> <property name="throwsIndent" value="2"/> <property name="lineWrappingIndentation" value="4"/> <property name="arrayInitIndent" value="4"/> </module> <module name="IllegalImport"> <property name="regexp" value="true"/> <property name="illegalPkgs" value="^com\.google\.api\.client\.repackaged, ^avro\.shaded, ^org\.apache\.hadoop\.hbase\.shaded, ^org\.apache\.hadoop\.shaded, ^javax\.ws\.rs\.ext, ^cc\.concurrent\.mango\.util\.concurrent, ^org\.apache\.curator-test\.shaded, ^com\.sun\.istack, ^org\.apache\.commons\.lang, ^org\.jetbrains\.annotations, ^jline\.internal, ^com\.cronutils\.utils, ^javax\.ws\.rs\.ext, ^org\.jboss\.netty\.util\.internal, ^com\.sun\.javafx, ^io\.reactivex\.annotations, ^org\.codehaus\.jackson"/> <property name="illegalClasses" 
value="^java\.util\.logging\.Logging, ^sun\.misc\.BASE64Encoder, ^sun\.misc\.BASE64Decoder, ^jdk\.internal\.jline\.internal\.Nullable"/> </module> <module name="RedundantImport"/> <module name="UnusedImports"/> <module name="ImportOrder"> <property name="staticGroups" value="org.apache.dolphinscheduler,org.apache,java,javax,org,com"/> <property name="separatedStaticGroups" value="true"/> <property name="groups" value="org.apache.dolphinscheduler,org.apache,java,javax,org,com"/> <property name="ordered" value="true"/> <property name="separated" value="true"/> <property name="option" value="top"/> <property name="sortStaticImportsAlphabetically" value="true"/> </module> <module name="NoWhitespaceBefore"> <property name="tokens" value="COMMA, SEMI, POST_INC, POST_DEC, DOT, ELLIPSIS, METHOD_REF"/> <property name="allowLineBreaks" value="true"/> </module> <module name="ParenPad"/> <module name="OperatorWrap"> <property name="option" value="NL"/> <property name="tokens" value="BAND, BOR, BSR, BXOR, DIV, EQUAL, GE, GT, LAND, LE, LITERAL_INSTANCEOF, LOR, LT, MINUS, MOD, NOT_EQUAL, PLUS, QUESTION, SL, SR, STAR, METHOD_REF "/> </module> <module name="AnnotationLocation"> <property name="allowSamelineMultipleAnnotations" value="false"/> <property name="allowSamelineSingleParameterlessAnnotation" value="false"/> <property name="allowSamelineParameterizedAnnotation" value="true"/> <property name="tokens" value="METHOD_DEF, CTOR_DEF"/> </module> <module name="MethodName"> <property name="format" value="^[a-z][a-z0-9][a-zA-Z0-9_]*$"/> <message key="name.invalidPattern" value="Method name ''{0}'' must match pattern ''{1}''."/> </module> <module name="EmptyCatchBlock"> <property name="exceptionVariableName" value="expected"/> </module> <module name="CommentsIndentation"/> <module name="EmptyStatement"> <property name="severity" value="error"/> </module> <module name="JavadocStyle"> <property name="endOfSentenceFormat" value=""/> </module> <module name="JavadocType"> <property 
name="scope" value="protected"/> <property name="allowMissingParamTags" value="true"/> </module> <module name="AvoidStarImport"/> </module> </module>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,778
[Feature][JsonSplit-api]schedule list、preview、delete interface
from #5498 Change the request parameter projectName to projectCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5778
https://github.com/apache/dolphinscheduler/pull/5779
72535a47e3dafc68c457996ea6e01b8da17685aa
d2a9e05a664a9bf12a29ae1873183b0173c0bf49
"2021-07-09T08:56:03Z"
java
"2021-07-12T02:21:17Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.controller; import static org.apache.dolphinscheduler.api.enums.Status.CREATE_SCHEDULE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.OFFLINE_SCHEDULE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.PREVIEW_SCHEDULE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.PUBLISH_SCHEDULE_ONLINE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_SCHEDULE_LIST_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_SCHEDULE_LIST_PAGING_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_SCHEDULE_ERROR; import static org.apache.dolphinscheduler.common.Constants.SESSION_USER; import org.apache.dolphinscheduler.api.aspect.AccessLogAnnotation; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.SchedulerService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import 
org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.entity.User; import java.util.Map; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestAttribute; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; import io.swagger.annotations.Api; import io.swagger.annotations.ApiImplicitParam; import io.swagger.annotations.ApiImplicitParams; import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiParam; import springfox.documentation.annotations.ApiIgnore; /** * scheduler controller */ @Api(tags = "SCHEDULER_TAG") @RestController @RequestMapping("/projects/{projectCode}/schedule") public class SchedulerController extends BaseController { public static final String DEFAULT_WARNING_TYPE = "NONE"; public static final String DEFAULT_NOTIFY_GROUP_ID = "1"; public static final String DEFAULT_FAILURE_POLICY = "CONTINUE"; public static final String DEFAULT_PROCESS_INSTANCE_PRIORITY = "MEDIUM"; @Autowired private SchedulerService schedulerService; /** * create schedule * * @param loginUser login user * @param projectCode project code * @param processDefinitionCode process definition code * @param schedule scheduler * @param warningType warning type * @param warningGroupId warning group id * @param 
failureStrategy failure strategy * @param processInstancePriority process instance priority * @param workerGroup worker group * @return create result code */ @ApiOperation(value = "createSchedule", notes = "CREATE_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionCode", value = "PROCESS_DEFINITION_CODE", required = true, dataType = "Long", example = "100"), @ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','timezoneId':'America/Phoenix','crontab':'0 0 3/6 * * ? *'}"), @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type = "WarningType"), @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", dataType = "Int", example = "100"), @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type = "FailureStrategy"), @ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int", example = "100"), @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type = "Priority"), }) @PostMapping("/create") @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_SCHEDULE_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result createSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam(value = "processDefinitionCode") long processDefinitionCode, @RequestParam(value = "schedule") String schedule, @RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType, @RequestParam(value = "warningGroupId", required = false, defaultValue = DEFAULT_NOTIFY_GROUP_ID) int warningGroupId, @RequestParam(value = "failureStrategy", required = false, defaultValue = DEFAULT_FAILURE_POLICY) FailureStrategy failureStrategy, @RequestParam(value = 
"workerGroup", required = false, defaultValue = "default") String workerGroup, @RequestParam(value = "processInstancePriority", required = false, defaultValue = DEFAULT_PROCESS_INSTANCE_PRIORITY) Priority processInstancePriority) { Map<String, Object> result = schedulerService.insertSchedule(loginUser, projectCode, processDefinitionCode, schedule, warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup); return returnDataList(result); } /** * updateProcessInstance schedule * * @param loginUser login user * @param projectCode project code * @param id scheduler id * @param schedule scheduler * @param warningType warning type * @param warningGroupId warning group id * @param failureStrategy failure strategy * @param workerGroup worker group * @param processInstancePriority process instance priority * @return update result code */ @ApiOperation(value = "updateSchedule", notes = "UPDATE_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? 
*'}"), @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type = "WarningType"), @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", dataType = "Int", example = "100"), @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type = "FailureStrategy"), @ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int", example = "100"), @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type = "Priority"), }) @PostMapping("/update") @ApiException(UPDATE_SCHEDULE_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result updateSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam(value = "id") Integer id, @RequestParam(value = "schedule") String schedule, @RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType, @RequestParam(value = "warningGroupId", required = false) int warningGroupId, @RequestParam(value = "failureStrategy", required = false, defaultValue = "END") FailureStrategy failureStrategy, @RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup, @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) { Map<String, Object> result = schedulerService.updateSchedule(loginUser, projectCode, id, schedule, warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup); return returnDataList(result); } /** * publish schedule setScheduleState * * @param loginUser login user * @param projectCode project code * @param id scheduler id * @return publish result code */ @ApiOperation(value = "online", notes = "ONLINE_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", 
example = "100") }) @PostMapping("/online") @ApiException(PUBLISH_SCHEDULE_ONLINE_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result online(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam("id") Integer id) { Map<String, Object> result = schedulerService.setScheduleState(loginUser, projectCode, id, ReleaseState.ONLINE); return returnDataList(result); } /** * offline schedule * * @param loginUser login user * @param projectCode project code * @param id schedule id * @return operation result code */ @ApiOperation(value = "offline", notes = "OFFLINE_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100") }) @PostMapping("/offline") @ApiException(OFFLINE_SCHEDULE_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result offline(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam("id") Integer id) { Map<String, Object> result = schedulerService.setScheduleState(loginUser, projectCode, id, ReleaseState.OFFLINE); return returnDataList(result); } /** * query schedule list paging * * @param loginUser login user * @param projectCode project code * @param processDefinitionCode process definition code * @param pageNo page number * @param pageSize page size * @param searchVal search value * @return schedule list page */ @ApiOperation(value = "queryScheduleListPaging", notes = "QUERY_SCHEDULE_LIST_PAGING_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"), @ApiImplicitParam(name = "pageNo", value = 
"PAGE_NO", dataType = "Int", example = "100"), @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "100") }) @GetMapping("/list-paging") @ApiException(QUERY_SCHEDULE_LIST_PAGING_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result queryScheduleListPaging(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam long processDefinitionCode, @RequestParam(value = "searchVal", required = false) String searchVal, @RequestParam("pageNo") Integer pageNo, @RequestParam("pageSize") Integer pageSize) { Map<String, Object> result = checkPageParams(pageNo, pageSize); if (result.get(Constants.STATUS) != Status.SUCCESS) { return returnDataListPaging(result); } searchVal = ParameterUtils.handleEscapes(searchVal); result = schedulerService.querySchedule(loginUser, projectCode, processDefinitionCode, searchVal, pageNo, pageSize); return returnDataListPaging(result); } /** * delete schedule by id * * @param loginUser login user * @param projectName project name * @param scheduleId scheule id * @return delete result code */ @ApiOperation(value = "deleteScheduleById", notes = "OFFLINE_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "scheduleId", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100") }) @GetMapping(value = "/delete") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_SCHEDULE_CRON_BY_ID_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result deleteScheduleById(@RequestAttribute(value = SESSION_USER) User loginUser, @PathVariable String projectName, @RequestParam("scheduleId") Integer scheduleId ) { Map<String, Object> result = schedulerService.deleteScheduleById(loginUser, projectName, scheduleId); return returnDataList(result); } /** * query schedule list * * @param loginUser login user * @param projectName project name * @return 
schedule list */ @ApiOperation(value = "queryScheduleList", notes = "QUERY_SCHEDULE_LIST_NOTES") @PostMapping("/list") @ApiException(QUERY_SCHEDULE_LIST_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result queryScheduleList(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName) { Map<String, Object> result = schedulerService.queryScheduleList(loginUser, projectName); return returnDataList(result); } /** * preview schedule * * @param loginUser login user * @param projectName project name * @param schedule schedule expression * @return the next five fire time */ @ApiOperation(value = "previewSchedule", notes = "PREVIEW_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}"), }) @PostMapping("/preview") @ResponseStatus(HttpStatus.CREATED) @ApiException(PREVIEW_SCHEDULE_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result previewSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam(value = "schedule") String schedule ) { Map<String, Object> result = schedulerService.previewSchedule(loginUser, projectName, schedule); return returnDataList(result); } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,778
[Feature][JsonSplit-api]schedule list、preview、delete interface
from #5498 Change the request parameter projectName to projectCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5778
https://github.com/apache/dolphinscheduler/pull/5779
72535a47e3dafc68c457996ea6e01b8da17685aa
d2a9e05a664a9bf12a29ae1873183b0173c0bf49
"2021-07-09T08:56:03Z"
java
"2021-07-12T02:21:17Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.dao.entity.User; import java.util.Map; /** * scheduler service */ public interface SchedulerService { /** * save schedule * * @param loginUser login user * @param projectCode project code * @param processDefineCode process definition code * @param schedule scheduler * @param warningType warning type * @param warningGroupId warning group id * @param failureStrategy failure strategy * @param processInstancePriority process instance priority * @param workerGroup worker group * @return create result code */ Map<String, Object> insertSchedule(User loginUser, long projectCode, long processDefineCode, String schedule, WarningType warningType, int warningGroupId, FailureStrategy failureStrategy, Priority processInstancePriority, String workerGroup); /** * updateProcessInstance schedule * * @param loginUser login user * @param projectCode project code * @param id scheduler id * @param 
scheduleExpression scheduler * @param warningType warning type * @param warningGroupId warning group id * @param failureStrategy failure strategy * @param workerGroup worker group * @param processInstancePriority process instance priority * @return update result code */ Map<String, Object> updateSchedule(User loginUser, long projectCode, Integer id, String scheduleExpression, WarningType warningType, int warningGroupId, FailureStrategy failureStrategy, Priority processInstancePriority, String workerGroup); /** * set schedule online or offline * * @param loginUser login user * @param projectCode project code * @param id scheduler id * @param scheduleStatus schedule status * @return publish result code */ Map<String, Object> setScheduleState(User loginUser, long projectCode, Integer id, ReleaseState scheduleStatus); /** * query schedule * * @param loginUser login user * @param projectCode project code * @param processDefineCode process definition code * @param pageNo page number * @param pageSize page size * @param searchVal search value * @return schedule list page */ Map<String, Object> querySchedule(User loginUser, long projectCode, long processDefineCode, String searchVal, Integer pageNo, Integer pageSize); /** * query schedule list * * @param loginUser login user * @param projectName project name * @return schedule list */ Map<String, Object> queryScheduleList(User loginUser, String projectName); /** * delete schedule * * @param projectId project id * @param scheduleId schedule id * @throws RuntimeException runtime exception */ void deleteSchedule(int projectId, int scheduleId); /** * delete schedule by id * * @param loginUser login user * @param projectName project name * @param scheduleId scheule id * @return delete result code */ Map<String, Object> deleteScheduleById(User loginUser, String projectName, Integer scheduleId); /** * preview schedule * * @param loginUser login user * @param projectName project name * @param schedule schedule expression * @return 
the next five fire time */ Map<String, Object> previewSchedule(User loginUser, String projectName, String schedule); }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,778
[Feature][JsonSplit-api]schedule list、preview、delete interface
from #5498 Change the request parameter projectName to projectCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5778
https://github.com/apache/dolphinscheduler/pull/5779
72535a47e3dafc68c457996ea6e01b8da17685aa
d2a9e05a664a9bf12a29ae1873183b0173c0bf49
"2021-07-09T08:56:03Z"
java
"2021-07-12T02:21:17Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.api.service.impl;

import org.apache.dolphinscheduler.api.dto.ScheduleParam;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.service.ExecutorService;
import org.apache.dolphinscheduler.api.service.MonitorService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.service.SchedulerService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.model.Server;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.quartz.ProcessScheduleJob;
import org.apache.dolphinscheduler.service.quartz.QuartzExecutors;
import org.apache.dolphinscheduler.service.quartz.cron.CronUtils;

import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;

/**
 * scheduler service impl
 */
@Service
public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerService {

    private static final Logger logger = LoggerFactory.getLogger(SchedulerServiceImpl.class);

    @Autowired
    private ProjectService projectService;

    @Autowired
    private ExecutorService executorService;

    @Autowired
    private MonitorService monitorService;

    @Autowired
    private ProcessService processService;

    @Autowired
    private ScheduleMapper scheduleMapper;

    @Autowired
    private ProjectMapper projectMapper;

    @Autowired
    private ProcessDefinitionMapper processDefinitionMapper;

    /**
     * save schedule
     *
     * @param loginUser login user
     * @param projectCode project code
     * @param processDefineCode process definition code
     * @param schedule scheduler expression, JSON of {@link ScheduleParam}
     * @param warningType warning type
     * @param warningGroupId warning group id
     * @param failureStrategy failure strategy
     * @param processInstancePriority process instance priority
     * @param workerGroup worker group
     * @return create result code
     */
    @Override
    @Transactional(rollbackFor = RuntimeException.class)
    public Map<String, Object> insertSchedule(User loginUser, long projectCode, long processDefineCode, String schedule,
                                              WarningType warningType, int warningGroupId, FailureStrategy failureStrategy,
                                              Priority processInstancePriority, String workerGroup) {
        Map<String, Object> result = new HashMap<>();

        Project project = projectMapper.queryByCode(projectCode);
        // check project auth
        boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result);
        if (!hasProjectAndPerm) {
            return result;
        }

        // check work flow define release state
        ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefineCode);
        result = executorService.checkProcessDefinitionValid(processDefinition, processDefineCode);
        if (result.get(Constants.STATUS) != Status.SUCCESS) {
            return result;
        }

        Schedule scheduleObj = new Schedule();
        Date now = new Date();

        scheduleObj.setProjectName(project.getName());
        scheduleObj.setProcessDefinitionId(processDefinition.getId());
        scheduleObj.setProcessDefinitionName(processDefinition.getName());

        ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class);
        // guard against malformed schedule JSON: parseObject returns null on failure,
        // which previously caused an NPE on the first getter call below
        if (scheduleParam == null) {
            putMsg(result, Status.PARSE_TO_CRON_EXPRESSION_ERROR);
            return result;
        }
        if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) {
            logger.warn("The start time must not be the same as the end");
            putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME);
            return result;
        }
        scheduleObj.setStartTime(scheduleParam.getStartTime());
        scheduleObj.setEndTime(scheduleParam.getEndTime());
        if (!CronExpression.isValidExpression(scheduleParam.getCrontab())) {
            logger.error("{} verify failure", scheduleParam.getCrontab());
            putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleParam.getCrontab());
            return result;
        }
        scheduleObj.setCrontab(scheduleParam.getCrontab());
        scheduleObj.setTimezoneId(scheduleParam.getTimezoneId());
        scheduleObj.setWarningType(warningType);
        scheduleObj.setWarningGroupId(warningGroupId);
        scheduleObj.setFailureStrategy(failureStrategy);
        scheduleObj.setCreateTime(now);
        scheduleObj.setUpdateTime(now);
        scheduleObj.setUserId(loginUser.getId());
        scheduleObj.setUserName(loginUser.getUserName());
        // a new schedule always starts offline; it must be set online explicitly
        scheduleObj.setReleaseState(ReleaseState.OFFLINE);
        scheduleObj.setProcessInstancePriority(processInstancePriority);
        scheduleObj.setWorkerGroup(workerGroup);
        scheduleMapper.insert(scheduleObj);

        // updateProcessInstance receivers and cc by process definition id
        processDefinition.setWarningGroupId(warningGroupId);
        processDefinitionMapper.updateById(processDefinition);

        // return scheduler object with ID
        result.put(Constants.DATA_LIST, scheduleMapper.selectById(scheduleObj.getId()));
        putMsg(result, Status.SUCCESS);

        result.put("scheduleId", scheduleObj.getId());
        return result;
    }

    /**
     * updateProcessInstance schedule
     *
     * @param loginUser login user
     * @param projectCode project code
     * @param id scheduler id
     * @param scheduleExpression scheduler expression, JSON of {@link ScheduleParam}; blank means "leave timing unchanged"
     * @param warningType warning type
     * @param warningGroupId warning group id
     * @param failureStrategy failure strategy
     * @param workerGroup worker group
     * @param processInstancePriority process instance priority
     * @return update result code
     */
    @Override
    @Transactional(rollbackFor = RuntimeException.class)
    public Map<String, Object> updateSchedule(User loginUser, long projectCode, Integer id, String scheduleExpression,
                                              WarningType warningType, int warningGroupId, FailureStrategy failureStrategy,
                                              Priority processInstancePriority, String workerGroup) {
        Map<String, Object> result = new HashMap<>();

        Project project = projectMapper.queryByCode(projectCode);
        // check project auth
        boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result);
        if (!hasProjectAndPerm) {
            return result;
        }

        // check schedule exists
        Schedule schedule = scheduleMapper.selectById(id);
        if (schedule == null) {
            putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id);
            return result;
        }

        ProcessDefinition processDefinition = processService.findProcessDefineById(schedule.getProcessDefinitionId());
        if (processDefinition == null) {
            putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, schedule.getProcessDefinitionId());
            return result;
        }

        // scheduling on-line status forbid modification
        if (checkValid(result, schedule.getReleaseState() == ReleaseState.ONLINE, Status.SCHEDULE_CRON_ONLINE_FORBID_UPDATE)) {
            return result;
        }

        Date now = new Date();

        // updateProcessInstance param
        if (StringUtils.isNotEmpty(scheduleExpression)) {
            ScheduleParam scheduleParam = JSONUtils.parseObject(scheduleExpression, ScheduleParam.class);
            // guard against malformed schedule JSON: parseObject returns null on failure
            if (scheduleParam == null) {
                putMsg(result, Status.PARSE_TO_CRON_EXPRESSION_ERROR);
                return result;
            }
            if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) {
                logger.warn("The start time must not be the same as the end");
                putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME);
                return result;
            }
            schedule.setStartTime(scheduleParam.getStartTime());
            schedule.setEndTime(scheduleParam.getEndTime());
            if (!CronExpression.isValidExpression(scheduleParam.getCrontab())) {
                putMsg(result, Status.SCHEDULE_CRON_CHECK_FAILED, scheduleParam.getCrontab());
                return result;
            }
            schedule.setCrontab(scheduleParam.getCrontab());
            schedule.setTimezoneId(scheduleParam.getTimezoneId());
        }

        if (warningType != null) {
            schedule.setWarningType(warningType);
        }

        schedule.setWarningGroupId(warningGroupId);

        if (failureStrategy != null) {
            schedule.setFailureStrategy(failureStrategy);
        }

        schedule.setWorkerGroup(workerGroup);
        schedule.setUpdateTime(now);
        schedule.setProcessInstancePriority(processInstancePriority);
        scheduleMapper.updateById(schedule);

        // updateProcessInstance recipients and cc by process definition ID
        processDefinition.setWarningGroupId(warningGroupId);
        processDefinitionMapper.updateById(processDefinition);

        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * set schedule online or offline
     *
     * @param loginUser login user
     * @param projectCode project code
     * @param id scheduler id
     * @param scheduleStatus schedule status
     * @return publish result code
     */
    @Override
    @Transactional(rollbackFor = RuntimeException.class)
    public Map<String, Object> setScheduleState(User loginUser, long projectCode, Integer id, ReleaseState scheduleStatus) {
        Map<String, Object> result = new HashMap<>();

        Project project = projectMapper.queryByCode(projectCode);
        // check project auth
        boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result);
        if (!hasProjectAndPerm) {
            return result;
        }

        // check schedule exists
        Schedule scheduleObj = scheduleMapper.selectById(id);
        if (scheduleObj == null) {
            putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id);
            return result;
        }
        // check schedule release state
        if (scheduleObj.getReleaseState() == scheduleStatus) {
            logger.info("schedule release is already {},needn't to change schedule id: {} from {} to {}",
                    scheduleObj.getReleaseState(), scheduleObj.getId(), scheduleObj.getReleaseState(), scheduleStatus);
            putMsg(result, Status.SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE, scheduleStatus);
            return result;
        }
        ProcessDefinition processDefinition = processService.findProcessDefineById(scheduleObj.getProcessDefinitionId());
        if (processDefinition == null) {
            putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, scheduleObj.getProcessDefinitionId());
            return result;
        }

        if (scheduleStatus == ReleaseState.ONLINE) {
            // check process definition release state
            if (processDefinition.getReleaseState() != ReleaseState.ONLINE) {
                logger.info("not release process definition id: {} , name : {}",
                        processDefinition.getId(), processDefinition.getName());
                putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName());
                return result;
            }
            // check sub process definition release state
            List<Integer> subProcessDefineIds = new ArrayList<>();
            processService.recurseFindSubProcessId(scheduleObj.getProcessDefinitionId(), subProcessDefineIds);
            Integer[] idArray = subProcessDefineIds.toArray(new Integer[0]);
            if (!subProcessDefineIds.isEmpty()) {
                List<ProcessDefinition> subProcessDefinitionList =
                        processDefinitionMapper.queryDefinitionListByIdList(idArray);
                if (subProcessDefinitionList != null && !subProcessDefinitionList.isEmpty()) {
                    for (ProcessDefinition subProcessDefinition : subProcessDefinitionList) {
                        // if there is no online process, exit directly
                        if (subProcessDefinition.getReleaseState() != ReleaseState.ONLINE) {
                            logger.info("not release process definition id: {} , name : {}",
                                    subProcessDefinition.getId(), subProcessDefinition.getName());
                            putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, subProcessDefinition.getId());
                            return result;
                        }
                    }
                }
            }
        }

        // check master server exists
        List<Server> masterServers = monitorService.getServerListFromRegistry(true);

        if (masterServers.isEmpty()) {
            putMsg(result, Status.MASTER_NOT_EXISTS);
            return result;
        }

        // set status
        scheduleObj.setReleaseState(scheduleStatus);

        scheduleMapper.updateById(scheduleObj);

        try {
            switch (scheduleStatus) {
                case ONLINE:
                    logger.info("Call master client set schedule online, project id: {}, flow id: {},host: {}",
                            project.getId(), processDefinition.getId(), masterServers);
                    setSchedule(project.getId(), scheduleObj);
                    break;
                case OFFLINE:
                    logger.info("Call master client set schedule offline, project id: {}, flow id: {},host: {}",
                            project.getId(), processDefinition.getId(), masterServers);
                    deleteSchedule(project.getId(), id);
                    break;
                default:
                    putMsg(result, Status.SCHEDULE_STATUS_UNKNOWN, scheduleStatus.toString());
                    return result;
            }
        } catch (Exception e) {
            result.put(Constants.MSG, scheduleStatus == ReleaseState.ONLINE ? "set online failure" : "set offline failure");
            throw new ServiceException(result.get(Constants.MSG).toString());
        }

        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * query schedule
     *
     * @param loginUser login user
     * @param projectCode project code
     * @param processDefineCode process definition code
     * @param pageNo page number
     * @param pageSize page size
     * @param searchVal search value
     * @return schedule list page
     */
    @Override
    public Map<String, Object> querySchedule(User loginUser, long projectCode, long processDefineCode, String searchVal,
                                             Integer pageNo, Integer pageSize) {
        // declare as Map (interface) rather than concrete HashMap
        Map<String, Object> result = new HashMap<>();

        Project project = projectMapper.queryByCode(projectCode);
        // check project auth
        boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result);
        if (!hasProjectAndPerm) {
            return result;
        }

        ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefineCode);
        if (processDefinition == null) {
            putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineCode);
            return result;
        }

        Page<Schedule> page = new Page<>(pageNo, pageSize);
        IPage<Schedule> scheduleIPage = scheduleMapper.queryByProcessDefineIdPaging(page, processDefinition.getId(), searchVal);

        PageInfo<Schedule> pageInfo = new PageInfo<>(pageNo, pageSize);
        pageInfo.setTotalCount((int) scheduleIPage.getTotal());
        pageInfo.setLists(scheduleIPage.getRecords());
        result.put(Constants.DATA_LIST, pageInfo);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * query schedule list
     *
     * @param loginUser login user
     * @param projectName project name
     * @return schedule list
     */
    @Override
    public Map<String, Object> queryScheduleList(User loginUser, String projectName) {
        Map<String, Object> result = new HashMap<>();
        Project project = projectMapper.queryByName(projectName);

        // check project auth
        boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result);
        if (!hasProjectAndPerm) {
            return result;
        }

        List<Schedule> schedules = scheduleMapper.querySchedulerListByProjectName(projectName);

        result.put(Constants.DATA_LIST, schedules);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * register the schedule as a quartz job
     *
     * @param projectId project id
     * @param schedule schedule
     */
    public void setSchedule(int projectId, Schedule schedule) {
        logger.info("set schedule, project id: {}, scheduleId: {}", projectId, schedule.getId());
        QuartzExecutors.getInstance().addJob(ProcessScheduleJob.class, projectId, schedule);
    }

    /**
     * delete schedule
     *
     * @param projectId project id
     * @param scheduleId schedule id
     * @throws RuntimeException runtime exception
     */
    @Override
    public void deleteSchedule(int projectId, int scheduleId) {
        logger.info("delete schedules of project id:{}, schedule id:{}", projectId, scheduleId);

        String jobName = QuartzExecutors.buildJobName(scheduleId);
        String jobGroupName = QuartzExecutors.buildJobGroupName(projectId);
        if (!QuartzExecutors.getInstance().deleteJob(jobName, jobGroupName)) {
            logger.warn("set offline failure:projectId:{},scheduleId:{}", projectId, scheduleId);
            throw new ServiceException("set offline failure");
        }
    }

    /**
     * check valid
     *
     * @param result result
     * @param bool bool
     * @param status status
     * @return check result code
     */
    private boolean checkValid(Map<String, Object> result, boolean bool, Status status) {
        // timeout is valid
        if (bool) {
            putMsg(result, status);
            return true;
        }
        return false;
    }

    /**
     * delete schedule by id
     *
     * @param loginUser login user
     * @param projectName project name
     * @param scheduleId scheule id
     * @return delete result code
     */
    @Override
    public Map<String, Object> deleteScheduleById(User loginUser, String projectName, Integer scheduleId) {
        Map<String, Object> result = new HashMap<>();
        Project project = projectMapper.queryByName(projectName);

        Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
        Status resultEnum = (Status) checkResult.get(Constants.STATUS);
        if (resultEnum != Status.SUCCESS) {
            return checkResult;
        }

        Schedule schedule = scheduleMapper.selectById(scheduleId);

        if (schedule == null) {
            putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, scheduleId);
            return result;
        }

        // Determine if the login user is the owner of the schedule
        if (loginUser.getId() != schedule.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER) {
            putMsg(result, Status.USER_NO_OPERATION_PERM);
            return result;
        }

        // check schedule is already online
        if (schedule.getReleaseState() == ReleaseState.ONLINE) {
            putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, schedule.getId());
            return result;
        }

        int delete = scheduleMapper.deleteById(scheduleId);

        if (delete > 0) {
            putMsg(result, Status.SUCCESS);
        } else {
            putMsg(result, Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR);
        }
        return result;
    }

    /**
     * preview schedule
     *
     * @param loginUser login user
     * @param projectName project name
     * @param schedule schedule expression
     * @return the next five fire time
     */
    @Override
    public Map<String, Object> previewSchedule(User loginUser, String projectName, String schedule) {
        Map<String, Object> result = new HashMap<>();
        CronExpression cronExpression;
        ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class);
        // guard against malformed schedule JSON: parseObject returns null on failure,
        // which previously caused an NPE on getStartTime() below
        if (scheduleParam == null) {
            putMsg(result, Status.PARSE_TO_CRON_EXPRESSION_ERROR);
            return result;
        }
        Date now = new Date();

        // never preview fire times in the past: clamp the window start to "now"
        Date startTime = now.after(scheduleParam.getStartTime()) ? now : scheduleParam.getStartTime();
        Date endTime = scheduleParam.getEndTime();
        try {
            cronExpression = CronUtils.parse2CronExpression(scheduleParam.getCrontab());
        } catch (ParseException e) {
            logger.error(e.getMessage(), e);
            putMsg(result, Status.PARSE_TO_CRON_EXPRESSION_ERROR);
            return result;
        }
        List<Date> selfFireDateList =
                CronUtils.getSelfFireDateList(startTime, endTime, cronExpression, Constants.PREVIEW_SCHEDULE_EXECUTE_COUNT);
        result.put(Constants.DATA_LIST, selfFireDateList.stream().map(DateUtils::dateToString));
        putMsg(result, Status.SUCCESS);
        return result;
    }
}
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,778
[Feature][JsonSplit-api]schedule list、preview、delete interface
from #5498 Change the request parameter projectName to projectCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5778
https://github.com/apache/dolphinscheduler/pull/5779
72535a47e3dafc68c457996ea6e01b8da17685aa
d2a9e05a664a9bf12a29ae1873183b0173c0bf49
"2021-07-09T08:56:03Z"
java
"2021-07-12T02:21:17Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.api.controller;

import static org.mockito.ArgumentMatchers.isA;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import java.util.Map;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.SchedulerService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.User;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;

/**
 * scheduler controller test
 */
public class SchedulerControllerTest extends AbstractControllerTest {
    private static Logger logger = LoggerFactory.getLogger(SchedulerControllerTest.class);

    @MockBean
    private SchedulerService schedulerService;

    @Test
    public void testCreateSchedule() throws Exception {
        MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
        paramsMap.add("processDefinitionCode","40");
        paramsMap.add("schedule","{'startTime':'2019-12-16 00:00:00','endTime':'2019-12-17 00:00:00','crontab':'0 0 6 * * ? *'}");
        paramsMap.add("warningType",String.valueOf(WarningType.NONE));
        paramsMap.add("warningGroupId","1");
        paramsMap.add("failureStrategy",String.valueOf(FailureStrategy.CONTINUE));
        // NOTE: the obsolete request parameters workerGroupId/receivers/receiversCc were
        // removed from the API (see issue #5511); the service takes a workerGroup name now
        paramsMap.add("processInstancePriority",String.valueOf(Priority.HIGH));

        Mockito.when(schedulerService.insertSchedule(isA(User.class), isA(Long.class), isA(Long.class),
                isA(String.class), isA(WarningType.class), isA(int.class), isA(FailureStrategy.class),
                isA(Priority.class), isA(String.class))).thenReturn(success());

        MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/schedule/create",123)
                .header(SESSION_ID, sessionId)
                .params(paramsMap))
                .andExpect(status().isCreated())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andReturn();

        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }

    @Test
    public void testUpdateSchedule() throws Exception {
        MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
        paramsMap.add("id","37");
        paramsMap.add("schedule","{'startTime':'2019-12-16 00:00:00','endTime':'2019-12-17 00:00:00','crontab':'0 0 7 * * ? *'}");
        paramsMap.add("warningType",String.valueOf(WarningType.NONE));
        paramsMap.add("warningGroupId","1");
        paramsMap.add("failureStrategy",String.valueOf(FailureStrategy.CONTINUE));
        // obsolete workerGroupId/receivers/receiversCc parameters removed (issue #5511)
        paramsMap.add("processInstancePriority",String.valueOf(Priority.HIGH));

        Mockito.when(schedulerService.updateSchedule(isA(User.class), isA(Long.class), isA(Integer.class),
                isA(String.class), isA(WarningType.class), isA(Integer.class), isA(FailureStrategy.class),
                isA(Priority.class), isA(String.class))).thenReturn(success());

        MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/schedule/update",123)
                .header(SESSION_ID, sessionId)
                .params(paramsMap))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andReturn();

        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }

    @Test
    public void testOnline() throws Exception {
        MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
        paramsMap.add("id","37");

        Mockito.when(schedulerService.setScheduleState(isA(User.class), isA(Long.class), isA(Integer.class),
                isA(ReleaseState.class))).thenReturn(success());

        MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/schedule/online",123)
                .header(SESSION_ID, sessionId)
                .params(paramsMap))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andReturn();

        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }

    @Test
    public void testOffline() throws Exception {
        MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
        paramsMap.add("id","28");

        Mockito.when(schedulerService.setScheduleState(isA(User.class), isA(Long.class), isA(Integer.class),
                isA(ReleaseState.class))).thenReturn(success());

        MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/schedule/offline",123)
                .header(SESSION_ID, sessionId)
                .params(paramsMap))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andReturn();

        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }

    @Test
    public void testQueryScheduleListPaging() throws Exception {
        MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
        paramsMap.add("processDefinitionCode","40");
        paramsMap.add("searchVal","test");
        paramsMap.add("pageNo","1");
        paramsMap.add("pageSize","30");

        Map<String, Object> mockResult = success();
        PageInfo<Resource> pageInfo = new PageInfo<>(1, 10);
        mockResult.put(Constants.DATA_LIST, pageInfo);

        Mockito.when(schedulerService.querySchedule(isA(User.class), isA(Long.class), isA(Long.class),
                isA(String.class), isA(Integer.class), isA(Integer.class))).thenReturn(mockResult);

        MvcResult mvcResult = mockMvc.perform(get("/projects/{projectCode}/schedule/list-paging",123)
                .header(SESSION_ID, sessionId)
                .params(paramsMap))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andReturn();

        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }

    @Test
    public void testQueryScheduleList() throws Exception {
        // stub the service like every other test; an unstubbed @MockBean returns null
        Mockito.when(schedulerService.queryScheduleList(isA(User.class), isA(String.class)))
                .thenReturn(success());

        MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/schedule/list","cxc_1113")
                .header(SESSION_ID, sessionId))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andReturn();

        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }

    @Test
    public void testPreviewSchedule() throws Exception {
        // stub the service like every other test; an unstubbed @MockBean returns null
        Mockito.when(schedulerService.previewSchedule(isA(User.class), isA(String.class), isA(String.class)))
                .thenReturn(success());

        MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/schedule/preview","cxc_1113")
                .header(SESSION_ID, sessionId)
                .param("schedule","{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}"))
                .andExpect(status().isCreated())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andReturn();

        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }

    @Test
    public void testDeleteScheduleById() throws Exception {
        MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
        paramsMap.add("scheduleId","37");

        // stub the service like every other test; an unstubbed @MockBean returns null
        Mockito.when(schedulerService.deleteScheduleById(isA(User.class), isA(String.class), isA(Integer.class)))
                .thenReturn(success());

        MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/schedule/delete","cxc_1113")
                .header(SESSION_ID, sessionId)
                .params(paramsMap))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andReturn();

        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }
}
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,778
[Feature][JsonSplit-api]schedule list、preview、delete interface
from #5498 Change the request parameter projectName to projectCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5778
https://github.com/apache/dolphinscheduler/pull/5779
72535a47e3dafc68c457996ea6e01b8da17685aa
d2a9e05a664a9bf12a29ae1873183b0173c0bf49
"2021-07-09T08:56:03Z"
java
"2021-07-12T02:21:17Z"
dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import _ from 'lodash' import io from '@/module/io' import { tasksState } from '@/conf/home/pages/dag/_source/config' // delete 'definitionList' from tasks const deleteDefinitionList = (tasks) => { const newTasks = [] tasks.forEach(item => { const newItem = Object.assign({}, item) if (newItem.dependence && newItem.dependence.dependTaskList) { newItem.dependence.dependTaskList.forEach(dependTaskItem => { if (dependTaskItem.dependItemList) { dependTaskItem.dependItemList.forEach(dependItem => { Reflect.deleteProperty(dependItem, 'definitionList') }) } }) } newTasks.push(newItem) }) return newTasks } export default { /** * Task status acquisition */ getTaskState ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/task-list-by-process-id`, { processInstanceId: payload }, res => { const arr = _.map(res.data.taskList, v => { return _.cloneDeep(_.assign(tasksState[v.state], { name: v.name, stateId: v.id, dependentResult: v.dependentResult })) }) resolve({ list: arr, processInstanceState: res.data.processInstanceState, taskList: res.data.taskList }) }).catch(e => { reject(e) }) }) }, /** * Update process definition status */ editProcessState ({ 
state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/process/release`, { processId: payload.processId, releaseState: payload.releaseState }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * get process definition versions pagination info */ getProcessDefinitionVersionsPage ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/versions`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * switch process definition version */ switchProcessDefinitionVersion ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/version/switch`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * delete process definition version */ deleteProcessDefinitionVersion ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/version/delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Update process instance status */ editExecutorsState ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/executors/execute`, { processInstanceId: payload.processInstanceId, executeType: payload.executeType }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Verify that the DGA map name exists */ verifDAGName ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/verify-name`, { name: payload }, res => { state.name = payload resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get process definition DAG diagram details */ getProcessDetails ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/select-by-id`, { processId: payload }, res => { // process definition code state.code = res.data.code // version state.version = res.data.version // name 
state.name = res.data.name // description state.description = res.data.description // connects state.connects = JSON.parse(res.data.connects) // locations state.locations = JSON.parse(res.data.locations) // Process definition const processDefinitionJson = JSON.parse(res.data.processDefinitionJson) // tasks info state.tasks = processDefinitionJson.tasks // tasks cache state.cacheTasks = {} processDefinitionJson.tasks.forEach(v => { state.cacheTasks[v.id] = v }) // global params state.globalParams = processDefinitionJson.globalParams // timeout state.timeout = processDefinitionJson.timeout state.tenantId = processDefinitionJson.tenantId resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get process definition DAG diagram details */ copyProcess ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/process/copy`, { processDefinitionIds: payload.processDefinitionIds, targetProjectId: payload.targetProjectId }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get process definition DAG diagram details */ moveProcess ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/process/move`, { processDefinitionIds: payload.processDefinitionIds, targetProjectId: payload.targetProjectId }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get all the items created by the logged in user */ getAllItems ({ state }, payload) { return new Promise((resolve, reject) => { io.get('projects/created-and-authorized-project', {}, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get the process instance DAG diagram details */ getInstancedetail ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/select-by-id`, { processInstanceId: payload }, res => { // code state.code = res.data.processDefinitionCode // version state.version = res.data.processDefinitionVersion // name state.name = 
res.data.name // desc state.description = res.data.description // connects state.connects = JSON.parse(res.data.connects) // locations state.locations = JSON.parse(res.data.locations) // process instance const processInstanceJson = JSON.parse(res.data.processInstanceJson) // tasks info state.tasks = processInstanceJson.tasks // tasks cache state.cacheTasks = {} processInstanceJson.tasks.forEach(v => { state.cacheTasks[v.id] = v }) // global params state.globalParams = processInstanceJson.globalParams // timeout state.timeout = processInstanceJson.timeout state.tenantId = processInstanceJson.tenantId // startup parameters state.startup = _.assign(state.startup, _.pick(res.data, ['commandType', 'failureStrategy', 'processInstancePriority', 'workerGroup', 'warningType', 'warningGroupId', 'receivers', 'receiversCc'])) state.startup.commandParam = JSON.parse(res.data.commandParam) resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Create process definition */ saveDAGchart ({ state }, payload) { return new Promise((resolve, reject) => { const data = { globalParams: state.globalParams, tasks: deleteDefinitionList(state.tasks), tenantId: state.tenantId, timeout: state.timeout } io.post(`projects/${state.projectName}/process/save`, { processDefinitionJson: JSON.stringify(data), name: _.trim(state.name), description: _.trim(state.description), locations: JSON.stringify(state.locations), connects: JSON.stringify(state.connects) }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Process definition update */ updateDefinition ({ state }, payload) { return new Promise((resolve, reject) => { const data = { globalParams: state.globalParams, tasks: deleteDefinitionList(state.tasks), tenantId: state.tenantId, timeout: state.timeout } io.post(`projects/${state.projectName}/process/update`, { processDefinitionJson: JSON.stringify(data), locations: JSON.stringify(state.locations), connects: JSON.stringify(state.connects), name: _.trim(state.name), description: 
_.trim(state.description), id: payload, releaseState: state.releaseState }, res => { resolve(res) state.isEditDag = false }).catch(e => { reject(e) }) }) }, /** * Process instance update */ updateInstance ({ state }, payload) { return new Promise((resolve, reject) => { const data = { globalParams: state.globalParams, tasks: state.tasks, tenantId: state.tenantId, timeout: state.timeout } io.post(`projects/${state.projectName}/instance/update`, { processInstanceJson: JSON.stringify(data), locations: JSON.stringify(state.locations), connects: JSON.stringify(state.connects), processInstanceId: payload, syncDefine: state.syncDefine }, res => { resolve(res) state.isEditDag = false }).catch(e => { reject(e) }) }) }, /** * Get a list of process definitions (sub-workflow usage is not paged) */ getProcessList ({ state }, payload) { return new Promise((resolve, reject) => { if (state.processListS.length) { resolve() return } io.get(`projects/${state.projectName}/process/list`, payload, res => { state.processListS = res.data resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get a list of process definitions (list page usage with pagination) */ getProcessListP ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/list-paging`, payload, res => { resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get a list of project */ getProjectList ({ state }, payload) { return new Promise((resolve, reject) => { if (state.projectListS.length) { resolve() return } io.get('projects/query-project-list', payload, res => { state.projectListS = res.data resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get a list of process definitions by project id */ getProcessByProjectId ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/queryProcessDefinitionAllByProjectId`, payload, res => { resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * get 
datasource */ getDatasourceList ({ state }, payload) { return new Promise((resolve, reject) => { io.get('datasources/list', { type: payload }, res => { resolve(res) }).catch(res => { reject(res) }) }) }, /** * get resources */ getResourcesList ({ state }) { return new Promise((resolve, reject) => { if (state.resourcesListS.length) { resolve() return } io.get('resources/list', { type: 'FILE' }, res => { state.resourcesListS = res.data resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * get jar */ getResourcesListJar ({ state }) { return new Promise((resolve, reject) => { if (state.resourcesListJar.length) { resolve() return } io.get('resources/list/jar', { type: 'FILE' }, res => { state.resourcesListJar = res.data resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get process instance */ getProcessInstance ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/list-paging`, payload, res => { state.instanceListS = res.data.totalList resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get alarm list */ getNotifyGroupList ({ state }, payload) { return new Promise((resolve, reject) => { io.get('alert-group/list', res => { state.notifyGroupListS = _.map(res.data, v => { return { id: v.id, code: v.groupName, disabled: false } }) resolve(_.cloneDeep(state.notifyGroupListS)) }).catch(res => { reject(res) }) }) }, /** * Process definition startup interface */ processStart ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/executors/start-process-instance`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * View log */ getLog ({ state }, payload) { return new Promise((resolve, reject) => { io.get('log/detail', payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get the process instance id according to the process definition id * @param taskId */ getSubProcessId ({ state }, payload) { return new 
Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/select-sub-process`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Called before the process definition starts */ getStartCheck ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/executors/start-check`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Create timing */ createSchedule ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectCode}/schedule/create`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Preview timing */ previewSchedule ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/schedule/preview`, payload, res => { resolve(res.data) // alert(res.data) }).catch(e => { reject(e) }) }) }, /** * Timing list paging */ getScheduleList ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectCode}/schedule/list-paging`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Timing online */ scheduleOffline ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectCode}/schedule/offline`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Timed offline */ scheduleOnline ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectCode}/schedule/online`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Edit timing */ updateSchedule ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectCode}/schedule/update`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Delete process instance */ deleteInstance ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/delete`, payload, 
res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Batch delete process instance */ batchDeleteInstance ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/batch-delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Delete definition */ deleteDefinition ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Batch delete definition */ batchDeleteDefinition ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/batch-delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * export definition */ exportDefinition ({ state }, payload) { const downloadBlob = (data, fileNameS = 'json') => { if (!data) { return } const blob = new Blob([data]) const fileName = `${fileNameS}.json` if ('download' in document.createElement('a')) { // 不是IE浏览器 const url = window.URL.createObjectURL(blob) const link = document.createElement('a') link.style.display = 'none' link.href = url link.setAttribute('download', fileName) document.body.appendChild(link) link.click() document.body.removeChild(link) // 下载完成移除元素 window.URL.revokeObjectURL(url) // 释放掉blob对象 } else { // IE 10+ window.navigator.msSaveBlob(blob, fileName) } } io.get(`projects/${state.projectName}/process/export`, { processDefinitionIds: payload.processDefinitionIds }, res => { downloadBlob(res, payload.fileName) }, e => { }, { responseType: 'blob' }) }, /** * Process instance get variable */ getViewvariables ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/view-variables`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get udfs function based on data source */ getUdfList ({ state }, payload) { return new Promise((resolve, reject) => { 
io.get('resources/udf-func/list', payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Query task instance list */ getTaskInstanceList ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/task-instance/list-paging`, payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * Force fail/kill/need_fault_tolerance task success */ forceTaskSuccess ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/task-instance/force-success`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Query task record list */ getTaskRecordList ({ state }, payload) { return new Promise((resolve, reject) => { io.get('projects/task-record/list-paging', payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * Query history task record list */ getHistoryTaskRecordList ({ state }, payload) { return new Promise((resolve, reject) => { io.get('projects/task-record/history-list-paging', payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * tree chart */ getViewTree ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/view-tree`, payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * gantt chart */ getViewGantt ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/view-gantt`, payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * Query task node list */ getProcessTasksList ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/gen-task-list`, payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, getTaskListDefIdAll ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/get-task-list`, payload, res => { resolve(res.data) 
}).catch(e => { reject(e) }) }) }, /** * remove timing */ deleteTiming ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/schedule/delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, getResourceId ({ state }, payload) { return new Promise((resolve, reject) => { io.get('resources/queryResource', payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,795
[Improvement][Server] The starttime field in the HttpTask log is not displayed as expected.
*For better global communication, please give priority to using English description, thx! * *Please review https://dolphinscheduler.apache.org/en-us/community/development/issue.html when describe an issue.* **Describe the question** ![image](https://user-images.githubusercontent.com/52202080/125186281-0e512680-e25c-11eb-8d5c-59d54640709d.png) ```java long costTime = System.currentTimeMillis() - startTime; logger.info("startTime: {}, httpUrl: {}, httpMethod: {}, costTime : {}Millisecond, statusCode : {}, body : {}, log : {}", DateUtils.format2Readable(startTime), httpParameters.getUrl(), httpParameters.getHttpMethod(), costTime, statusCode, body, output); public static String format2Readable(long ms) { long days = MILLISECONDS.toDays(ms); long hours = MILLISECONDS.toDurationHours(ms); long minutes = MILLISECONDS.toDurationMinutes(ms); long seconds = MILLISECONDS.toDurationSeconds(ms); return String.format("%02d %02d:%02d:%02d", days, hours, minutes, seconds); } ``` The API `format2Readable` is intended to display the execution time of a task more friendly, such as how many days and hours it has been executed. It's better to convert the timestamp to a formatted time according to a specified `DateTimeFormatter` **Which version of DolphinScheduler:** latest dev branch **Describe alternatives you've considered** A clear and concise description of any alternative improvement solutions you've considered.
https://github.com/apache/dolphinscheduler/issues/5795
https://github.com/apache/dolphinscheduler/pull/5796
16986c3c651af38469c6d4cb03a587fd174c9a9b
7bffe0ac85b0147210facdeedc531026b0022e6f
"2021-07-11T07:49:32Z"
java
"2021-07-12T06:31:48Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.common.utils; import org.apache.dolphinscheduler.common.Constants; import java.time.Instant; import java.time.LocalDateTime; import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.util.Calendar; import java.util.Date; import java.util.TimeZone; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * date utils */ public class DateUtils { private static final Logger logger = LoggerFactory.getLogger(DateUtils.class); private DateUtils() { throw new UnsupportedOperationException("Construct DateUtils"); } /** * date to local datetime * * @param date date * @return local datetime */ private static LocalDateTime date2LocalDateTime(Date date) { return LocalDateTime.ofInstant(date.toInstant(), ZoneId.systemDefault()); } /** * local datetime to date * * @param localDateTime local datetime * @return date */ private static Date localDateTime2Date(LocalDateTime localDateTime) { Instant instant = localDateTime.atZone(ZoneId.systemDefault()).toInstant(); return Date.from(instant); } /** * get current date str * * @return date string */ public static String getCurrentTime() { return 
getCurrentTime(Constants.YYYY_MM_DD_HH_MM_SS); } /** * get the date string in the specified format of the current time * * @param format date format * @return date string */ public static String getCurrentTime(String format) { return LocalDateTime.now().format(DateTimeFormatter.ofPattern(format)); } /** * get the formatted date string * * @param date date * @param format e.g. yyyy-MM-dd HH:mm:ss * @return date string */ public static String format(Date date, String format) { return format(date2LocalDateTime(date), format); } /** * get the formatted date string * * @param localDateTime local data time * @param format yyyy-MM-dd HH:mm:ss * @return date string */ public static String format(LocalDateTime localDateTime, String format) { return localDateTime.format(DateTimeFormatter.ofPattern(format)); } /** * convert time to yyyy-MM-dd HH:mm:ss format * * @param date date * @return date string */ public static String dateToString(Date date) { return format(date, Constants.YYYY_MM_DD_HH_MM_SS); } /** * convert string to date and time * * @param date date * @param format format * @return date */ public static Date parse(String date, String format) { try { LocalDateTime ldt = LocalDateTime.parse(date, DateTimeFormatter.ofPattern(format)); return localDateTime2Date(ldt); } catch (Exception e) { logger.error("error while parse date:" + date, e); } return null; } /** * convert date str to yyyy-MM-dd HH:mm:ss format * * @param str date string * @return yyyy-MM-dd HH:mm:ss format */ public static Date stringToDate(String str) { return parse(str, Constants.YYYY_MM_DD_HH_MM_SS); } /** * get seconds between two dates * * @param d1 date1 * @param d2 date2 * @return differ seconds */ public static long differSec(Date d1, Date d2) { if (d1 == null || d2 == null) { return 0; } return (long) Math.ceil(differMs(d1, d2) / 1000.0); } /** * get ms between two dates * * @param d1 date1 * @param d2 date2 * @return differ ms */ public static long differMs(Date d1, Date d2) { return 
Math.abs(d1.getTime() - d2.getTime()); } /** * get hours between two dates * * @param d1 date1 * @param d2 date2 * @return differ hours */ public static long diffHours(Date d1, Date d2) { return (long) Math.ceil(diffMin(d1, d2) / 60.0); } /** * get minutes between two dates * * @param d1 date1 * @param d2 date2 * @return differ minutes */ public static long diffMin(Date d1, Date d2) { return (long) Math.ceil(differSec(d1, d2) / 60.0); } /** * get the date of the specified date in the days before and after * * @param date date * @param day day * @return the date of the specified date in the days before and after */ public static Date getSomeDay(Date date, int day) { Calendar calendar = Calendar.getInstance(); calendar.setTime(date); calendar.add(Calendar.DATE, day); return calendar.getTime(); } /** * get the hour of day. * * @param date date * @return hour of day */ public static int getHourIndex(Date date) { Calendar calendar = Calendar.getInstance(); calendar.setTime(date); return calendar.get(Calendar.HOUR_OF_DAY); } /** * compare two dates * * @param future future date * @param old old date * @return true if future time greater than old time */ public static boolean compare(Date future, Date old) { return future.getTime() > old.getTime(); } /** * convert schedule string to date * * @param schedule schedule * @return convert schedule string to date */ public static Date getScheduleDate(String schedule) { return stringToDate(schedule); } /** * format time to readable * * @param ms ms * @return format time */ public static String format2Readable(long ms) { long days = MILLISECONDS.toDays(ms); long hours = MILLISECONDS.toDurationHours(ms); long minutes = MILLISECONDS.toDurationMinutes(ms); long seconds = MILLISECONDS.toDurationSeconds(ms); return String.format("%02d %02d:%02d:%02d", days, hours, minutes, seconds); } /** * * format time to duration * * @param d1 d1 * @param d2 d2 * @return format time */ public static String format2Duration(Date d1, Date d2) { if (d1 
== null || d2 == null) { return null; } return format2Duration(differMs(d1, d2)); } /** * format time to duration * * @param ms ms * @return format time */ public static String format2Duration(long ms) { long days = MILLISECONDS.toDays(ms); long hours = MILLISECONDS.toDurationHours(ms); long minutes = MILLISECONDS.toDurationMinutes(ms); long seconds = MILLISECONDS.toDurationSeconds(ms); StringBuilder strBuilder = new StringBuilder(); strBuilder = days > 0 ? strBuilder.append(days).append("d").append(" ") : strBuilder; strBuilder = hours > 0 ? strBuilder.append(hours).append("h").append(" ") : strBuilder; strBuilder = minutes > 0 ? strBuilder.append(minutes).append("m").append(" ") : strBuilder; strBuilder = seconds > 0 ? strBuilder.append(seconds).append("s") : strBuilder; return strBuilder.toString(); } /** * get monday * <p> * note: Set the first day of the week to Monday, the default is Sunday * * @param date date * @return get monday */ public static Date getMonday(Date date) { Calendar cal = Calendar.getInstance(); cal.setTime(date); cal.setFirstDayOfWeek(Calendar.MONDAY); cal.set(Calendar.DAY_OF_WEEK, Calendar.MONDAY); return cal.getTime(); } /** * get sunday * <p> * note: Set the first day of the week to Monday, the default is Sunday * * @param date date * @return get sunday */ public static Date getSunday(Date date) { Calendar cal = Calendar.getInstance(); cal.setTime(date); cal.setFirstDayOfWeek(Calendar.MONDAY); cal.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY); return cal.getTime(); } /** * get first day of month * * @param date date * @return first day of month */ public static Date getFirstDayOfMonth(Date date) { Calendar cal = Calendar.getInstance(); cal.setTime(date); cal.set(Calendar.DAY_OF_MONTH, 1); return cal.getTime(); } /** * get some hour of day * * @param date date * @param offsetHour hours * @return some hour of day */ public static Date getSomeHourOfDay(Date date, int offsetHour) { Calendar cal = Calendar.getInstance(); cal.setTime(date); 
cal.set(Calendar.HOUR_OF_DAY, cal.get(Calendar.HOUR_OF_DAY) + offsetHour); cal.set(Calendar.MINUTE, 0); cal.set(Calendar.SECOND, 0); cal.set(Calendar.MILLISECOND, 0); return cal.getTime(); } /** * get last day of month * * @param date date * @return get last day of month */ public static Date getLastDayOfMonth(Date date) { Calendar cal = Calendar.getInstance(); cal.setTime(date); cal.add(Calendar.MONTH, 1); cal.set(Calendar.DAY_OF_MONTH, 1); cal.add(Calendar.DAY_OF_MONTH, -1); return cal.getTime(); } /** * return YYYY-MM-DD 00:00:00 * * @param inputDay date * @return start day */ public static Date getStartOfDay(Date inputDay) { Calendar cal = Calendar.getInstance(); cal.setTime(inputDay); cal.set(Calendar.HOUR_OF_DAY, 0); cal.set(Calendar.MINUTE, 0); cal.set(Calendar.SECOND, 0); cal.set(Calendar.MILLISECOND, 0); return cal.getTime(); } /** * return YYYY-MM-DD 23:59:59 * * @param inputDay day * @return end of day */ public static Date getEndOfDay(Date inputDay) { Calendar cal = Calendar.getInstance(); cal.setTime(inputDay); cal.set(Calendar.HOUR_OF_DAY, 23); cal.set(Calendar.MINUTE, 59); cal.set(Calendar.SECOND, 59); cal.set(Calendar.MILLISECOND, 999); return cal.getTime(); } /** * return YYYY-MM-DD 00:00:00 * * @param inputDay day * @return start of hour */ public static Date getStartOfHour(Date inputDay) { Calendar cal = Calendar.getInstance(); cal.setTime(inputDay); cal.set(Calendar.MINUTE, 0); cal.set(Calendar.SECOND, 0); cal.set(Calendar.MILLISECOND, 0); return cal.getTime(); } /** * return YYYY-MM-DD 23:59:59 * * @param inputDay day * @return end of hour */ public static Date getEndOfHour(Date inputDay) { Calendar cal = Calendar.getInstance(); cal.setTime(inputDay); cal.set(Calendar.MINUTE, 59); cal.set(Calendar.SECOND, 59); cal.set(Calendar.MILLISECOND, 999); return cal.getTime(); } /** * get current date * * @return current date */ public static Date getCurrentDate() { return DateUtils.parse(DateUtils.getCurrentTime(), Constants.YYYY_MM_DD_HH_MM_SS); } /** 
* get date * * @param date date * @param calendarField calendarField * @param amount amount * @return date */ public static Date add(final Date date, final int calendarField, final int amount) { if (date == null) { throw new IllegalArgumentException("The date must not be null"); } final Calendar c = Calendar.getInstance(); c.setTime(date); c.add(calendarField, amount); return c.getTime(); } /** * starting from the current time, get how many seconds are left before the target time. * targetTime = baseTime + intervalSeconds * * @param baseTime base time * @param intervalSeconds a period of time * @return the number of seconds */ public static long getRemainTime(Date baseTime, long intervalSeconds) { if (baseTime == null) { return 0; } long usedTime = (System.currentTimeMillis() - baseTime.getTime()) / 1000; return intervalSeconds - usedTime; } /** * get current time stamp : yyyyMMddHHmmssSSS * * @return date string */ public static String getCurrentTimeStamp() { return getCurrentTime(Constants.YYYYMMDDHHMMSSSSS); } /** * transform date to target timezone date * <p>e.g. 
* <p> if input date is 2020-01-01 00:00:00 current timezone is CST * <p>targetTimezoneId is MST * <p>this method will return 2020-01-01 15:00:00 */ public static Date getTimezoneDate(Date date, String targetTimezoneId) { if (StringUtils.isEmpty(targetTimezoneId)) { return date; } String dateToString = dateToString(date); LocalDateTime localDateTime = LocalDateTime.parse(dateToString, DateTimeFormatter.ofPattern(Constants.YYYY_MM_DD_HH_MM_SS)); ZonedDateTime zonedDateTime = ZonedDateTime.of(localDateTime, TimeZone.getTimeZone(targetTimezoneId).toZoneId()); return Date.from(zonedDateTime.toInstant()); } /** * get timezone by timezoneId */ public static TimeZone getTimezone(String timezoneId) { if (StringUtils.isEmpty(timezoneId)) { return null; } return TimeZone.getTimeZone(timezoneId); } static final long C0 = 1L; static final long C1 = C0 * 1000L; static final long C2 = C1 * 1000L; static final long C3 = C2 * 1000L; static final long C4 = C3 * 60L; static final long C5 = C4 * 60L; static final long C6 = C5 * 24L; /** * Time unit representing one thousandth of a second */ public static class MILLISECONDS { public static long toSeconds(long d) { return d / (C3 / C2); } public static long toMinutes(long d) { return d / (C4 / C2); } public static long toHours(long d) { return d / (C5 / C2); } public static long toDays(long d) { return d / (C6 / C2); } public static long toDurationSeconds(long d) { return (d % (C4 / C2)) / (C3 / C2); } public static long toDurationMinutes(long d) { return (d % (C5 / C2)) / (C4 / C2); } public static long toDurationHours(long d) { return (d % (C6 / C2)) / (C5 / C2); } } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,795
[Improvement][Server] The starttime field in the HttpTask log is not displayed as expected.
*For better global communication, please give priority to using English description, thx! * *Please review https://dolphinscheduler.apache.org/en-us/community/development/issue.html when describe an issue.* **Describe the question** ![image](https://user-images.githubusercontent.com/52202080/125186281-0e512680-e25c-11eb-8d5c-59d54640709d.png) ```java long costTime = System.currentTimeMillis() - startTime; logger.info("startTime: {}, httpUrl: {}, httpMethod: {}, costTime : {}Millisecond, statusCode : {}, body : {}, log : {}", DateUtils.format2Readable(startTime), httpParameters.getUrl(), httpParameters.getHttpMethod(), costTime, statusCode, body, output); public static String format2Readable(long ms) { long days = MILLISECONDS.toDays(ms); long hours = MILLISECONDS.toDurationHours(ms); long minutes = MILLISECONDS.toDurationMinutes(ms); long seconds = MILLISECONDS.toDurationSeconds(ms); return String.format("%02d %02d:%02d:%02d", days, hours, minutes, seconds); } ``` The API `format2Readable` is intended to display the execution time of a task more friendly, such as how many days and hours it has been executed. It's better to convert the timestamp to a formatted time according to a specified `DateTimeFormatter` **Which version of DolphinScheduler:** latest dev branch **Describe alternatives you've considered** A clear and concise description of any alternative improvement solutions you've considered.
https://github.com/apache/dolphinscheduler/issues/5795
https://github.com/apache/dolphinscheduler/pull/5796
16986c3c651af38469c6d4cb03a587fd174c9a9b
7bffe0ac85b0147210facdeedc531026b0022e6f
"2021-07-11T07:49:32Z"
java
"2021-07-12T06:31:48Z"
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/DateUtilsTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.common.utils; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Date; import java.util.TimeZone; import org.junit.Assert; import org.junit.Test; public class DateUtilsTest { @Test public void format2Readable() throws ParseException { SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); String start = "2015-12-21 18:00:36"; Date startDate = sdf.parse(start); String end = "2015-12-23 03:23:44"; Date endDate = sdf.parse(end); String readableDate = DateUtils.format2Readable(endDate.getTime() - startDate.getTime()); Assert.assertEquals("01 09:23:08", readableDate); } @Test public void testWeek() { Date curr = DateUtils.stringToDate("2019-02-01 00:00:00"); Date monday1 = DateUtils.stringToDate("2019-01-28 00:00:00"); Date sunday1 = DateUtils.stringToDate("2019-02-03 00:00:00"); Date monday = DateUtils.getMonday(curr); Date sunday = DateUtils.getSunday(monday); Assert.assertEquals(monday, monday1); Assert.assertEquals(sunday, sunday1); } @Test public void diffHours() { Date d1 = DateUtils.stringToDate("2019-01-28 00:00:00"); Date d2 = DateUtils.stringToDate("2019-01-28 20:00:00"); 
Assert.assertEquals(DateUtils.diffHours(d1, d2), 20); Date d3 = DateUtils.stringToDate("2019-01-28 20:00:00"); Assert.assertEquals(DateUtils.diffHours(d3, d2), 0); Assert.assertEquals(DateUtils.diffHours(d2, d1), 20); Date d4 = null; Assert.assertEquals(DateUtils.diffHours(d2, d4), 0); } @Test public void dateToString() { Date d1 = DateUtils.stringToDate("2019-01-28"); Assert.assertNull(d1); d1 = DateUtils.stringToDate("2019-01-28 00:00:00"); Assert.assertEquals(DateUtils.dateToString(d1), "2019-01-28 00:00:00"); } @Test public void getSomeDay() { Date d1 = DateUtils.stringToDate("2019-01-31 00:00:00"); Date curr = DateUtils.getSomeDay(d1, 1); Assert.assertEquals(DateUtils.dateToString(curr), "2019-02-01 00:00:00"); Assert.assertEquals(DateUtils.dateToString(DateUtils.getSomeDay(d1, -31)), "2018-12-31 00:00:00"); } @Test public void getFirstDayOfMonth() { Date d1 = DateUtils.stringToDate("2019-01-31 00:00:00"); Date curr = DateUtils.getFirstDayOfMonth(d1); Assert.assertEquals(DateUtils.dateToString(curr), "2019-01-01 00:00:00"); d1 = DateUtils.stringToDate("2019-01-31 01:59:00"); curr = DateUtils.getFirstDayOfMonth(d1); Assert.assertEquals(DateUtils.dateToString(curr), "2019-01-01 01:59:00"); } @Test public void getSomeHourOfDay() { Date d1 = DateUtils.stringToDate("2019-01-31 11:59:59"); Date curr = DateUtils.getSomeHourOfDay(d1, -1); Assert.assertEquals(DateUtils.dateToString(curr), "2019-01-31 10:00:00"); curr = DateUtils.getSomeHourOfDay(d1, 0); Assert.assertEquals(DateUtils.dateToString(curr), "2019-01-31 11:00:00"); curr = DateUtils.getSomeHourOfDay(d1, 2); Assert.assertEquals(DateUtils.dateToString(curr), "2019-01-31 13:00:00"); curr = DateUtils.getSomeHourOfDay(d1, 24); Assert.assertEquals(DateUtils.dateToString(curr), "2019-02-01 11:00:00"); } @Test public void getLastDayOfMonth() { Date d1 = DateUtils.stringToDate("2019-01-31 11:59:59"); Date curr = DateUtils.getLastDayOfMonth(d1); Assert.assertEquals(DateUtils.dateToString(curr), "2019-01-31 11:59:59"); 
d1 = DateUtils.stringToDate("2019-01-02 11:59:59"); curr = DateUtils.getLastDayOfMonth(d1); Assert.assertEquals(DateUtils.dateToString(curr), "2019-01-31 11:59:59"); d1 = DateUtils.stringToDate("2019-02-02 11:59:59"); curr = DateUtils.getLastDayOfMonth(d1); Assert.assertEquals(DateUtils.dateToString(curr), "2019-02-28 11:59:59"); d1 = DateUtils.stringToDate("2020-02-02 11:59:59"); curr = DateUtils.getLastDayOfMonth(d1); Assert.assertEquals(DateUtils.dateToString(curr), "2020-02-29 11:59:59"); } @Test public void getStartOfDay() { Date d1 = DateUtils.stringToDate("2019-01-31 11:59:59"); Date curr = DateUtils.getStartOfDay(d1); Assert.assertEquals(DateUtils.dateToString(curr), "2019-01-31 00:00:00"); } @Test public void getEndOfDay() { Date d1 = DateUtils.stringToDate("2019-01-31 11:00:59"); Date curr = DateUtils.getEndOfDay(d1); Assert.assertEquals(DateUtils.dateToString(curr), "2019-01-31 23:59:59"); } @Test public void getStartOfHour() { Date d1 = DateUtils.stringToDate("2019-01-31 11:00:59"); Date curr = DateUtils.getStartOfHour(d1); Assert.assertEquals(DateUtils.dateToString(curr), "2019-01-31 11:00:00"); } @Test public void getEndOfHour() { Date d1 = DateUtils.stringToDate("2019-01-31 11:00:59"); Date curr = DateUtils.getEndOfHour(d1); Assert.assertEquals(DateUtils.dateToString(curr), "2019-01-31 11:59:59"); } @Test public void getCurrentTimeStamp() { String timeStamp = DateUtils.getCurrentTimeStamp(); Assert.assertNotNull(timeStamp); } @Test public void testFormat2Duration() { // days hours minutes seconds Date d1 = DateUtils.stringToDate("2020-01-20 11:00:00"); Date d2 = DateUtils.stringToDate("2020-01-21 12:10:10"); String duration = DateUtils.format2Duration(d2, d1); Assert.assertEquals("1d 1h 10m 10s", duration); // hours minutes seconds d1 = DateUtils.stringToDate("2020-01-20 11:00:00"); d2 = DateUtils.stringToDate("2020-01-20 12:10:10"); duration = DateUtils.format2Duration(d2, d1); Assert.assertEquals("1h 10m 10s", duration); // minutes seconds d1 = 
DateUtils.stringToDate("2020-01-20 11:00:00"); d2 = DateUtils.stringToDate("2020-01-20 11:10:10"); duration = DateUtils.format2Duration(d2, d1); Assert.assertEquals("10m 10s", duration); // minutes seconds d1 = DateUtils.stringToDate("2020-01-20 11:10:00"); d2 = DateUtils.stringToDate("2020-01-20 11:10:10"); duration = DateUtils.format2Duration(d2, d1); Assert.assertEquals("10s", duration); d1 = DateUtils.stringToDate("2020-01-20 11:10:00"); d2 = DateUtils.stringToDate("2020-01-21 11:10:10"); duration = DateUtils.format2Duration(d2, d1); Assert.assertEquals("1d 10s", duration); d1 = DateUtils.stringToDate("2020-01-20 11:10:00"); d2 = DateUtils.stringToDate("2020-01-20 16:10:10"); duration = DateUtils.format2Duration(d2, d1); Assert.assertEquals("5h 10s", duration); } @Test public void testNullDuration() { // days hours minutes seconds Date d1 = DateUtils.stringToDate("2020-01-20 11:00:00"); Date d2 = null; Assert.assertNull(DateUtils.format2Duration(d1, d2)); } @Test public void testTransformToTimezone() { Date date = new Date(); Date mst = DateUtils.getTimezoneDate(date, TimeZone.getDefault().getID()); Assert.assertEquals(DateUtils.dateToString(date), DateUtils.dateToString(mst)); } @Test public void testGetTimezone() { Assert.assertNull(DateUtils.getTimezone(null)); Assert.assertEquals(TimeZone.getTimeZone("MST"), DateUtils.getTimezone("MST")); } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,795
[Improvement][Server] The starttime field in the HttpTask log is not displayed as expected.
*For better global communication, please give priority to using English description, thx! * *Please review https://dolphinscheduler.apache.org/en-us/community/development/issue.html when describe an issue.* **Describe the question** ![image](https://user-images.githubusercontent.com/52202080/125186281-0e512680-e25c-11eb-8d5c-59d54640709d.png) ```java long costTime = System.currentTimeMillis() - startTime; logger.info("startTime: {}, httpUrl: {}, httpMethod: {}, costTime : {}Millisecond, statusCode : {}, body : {}, log : {}", DateUtils.format2Readable(startTime), httpParameters.getUrl(), httpParameters.getHttpMethod(), costTime, statusCode, body, output); public static String format2Readable(long ms) { long days = MILLISECONDS.toDays(ms); long hours = MILLISECONDS.toDurationHours(ms); long minutes = MILLISECONDS.toDurationMinutes(ms); long seconds = MILLISECONDS.toDurationSeconds(ms); return String.format("%02d %02d:%02d:%02d", days, hours, minutes, seconds); } ``` The API `format2Readable` is intended to display the execution time of a task more friendly, such as how many days and hours it has been executed. It's better to convert the timestamp to a formatted time according to a specified `DateTimeFormatter` **Which version of DolphinScheduler:** latest dev branch **Describe alternatives you've considered** A clear and concise description of any alternative improvement solutions you've considered.
https://github.com/apache/dolphinscheduler/issues/5795
https://github.com/apache/dolphinscheduler/pull/5796
16986c3c651af38469c6d4cb03a587fd174c9a9b
7bffe0ac85b0147210facdeedc531026b0022e6f
"2021-07-11T07:49:32Z"
java
"2021-07-12T06:31:48Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/http/HttpTask.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.worker.task.http; import com.fasterxml.jackson.databind.node.ObjectNode; import org.apache.commons.io.Charsets; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.HttpMethod; import org.apache.dolphinscheduler.common.enums.HttpParametersType; import org.apache.dolphinscheduler.common.process.HttpProperty; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.http.HttpParameters; import org.apache.dolphinscheduler.common.utils.*; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractTask; import org.apache.http.HttpEntity; import org.apache.http.ParseException; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.client.methods.RequestBuilder; import 
org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.client.HttpClients; import org.apache.http.util.EntityUtils; import org.slf4j.Logger; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import java.util.Map; /** * http task */ public class HttpTask extends AbstractTask { /** * http parameters */ private HttpParameters httpParameters; /** * application json */ protected static final String APPLICATION_JSON = "application/json"; /** * output */ protected String output; /** * taskExecutionContext */ private TaskExecutionContext taskExecutionContext; /** * constructor * @param taskExecutionContext taskExecutionContext * @param logger logger */ public HttpTask(TaskExecutionContext taskExecutionContext, Logger logger) { super(taskExecutionContext, logger); this.taskExecutionContext = taskExecutionContext; } @Override public void init() { logger.info("http task params {}", taskExecutionContext.getTaskParams()); this.httpParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), HttpParameters.class); if (!httpParameters.checkParameters()) { throw new RuntimeException("http task params is not valid"); } } @Override public void handle() throws Exception { String threadLoggerInfoName = String.format(Constants.TASK_LOG_INFO_FORMAT, taskExecutionContext.getTaskAppId()); Thread.currentThread().setName(threadLoggerInfoName); long startTime = System.currentTimeMillis(); String statusCode = null; String body = null; try(CloseableHttpClient client = createHttpClient(); CloseableHttpResponse response = sendRequest(client)) { statusCode = String.valueOf(getStatusCode(response)); body = getResponseBody(response); exitStatusCode = validResponse(body, statusCode); long costTime = System.currentTimeMillis() - startTime; logger.info("startTime: {}, httpUrl: {}, httpMethod: {}, 
costTime : {}Millisecond, statusCode : {}, body : {}, log : {}", DateUtils.format2Readable(startTime), httpParameters.getUrl(),httpParameters.getHttpMethod(), costTime, statusCode, body, output); }catch (Exception e){ appendMessage(e.toString()); exitStatusCode = -1; logger.error("httpUrl[" + httpParameters.getUrl() + "] connection failed:"+output, e); throw e; } } /** * send request * @param client client * @return CloseableHttpResponse * @throws IOException io exception */ protected CloseableHttpResponse sendRequest(CloseableHttpClient client) throws IOException { RequestBuilder builder = createRequestBuilder(); // replace placeholder Map<String, Property> paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()), taskExecutionContext.getDefinedParams(), httpParameters.getLocalParametersMap(), httpParameters.getVarPoolMap(), CommandType.of(taskExecutionContext.getCmdTypeIfComplement()), taskExecutionContext.getScheduleTime()); List<HttpProperty> httpPropertyList = new ArrayList<>(); if(CollectionUtils.isNotEmpty(httpParameters.getHttpParams() )){ for (HttpProperty httpProperty: httpParameters.getHttpParams()) { String jsonObject = JSONUtils.toJsonString(httpProperty); String params = ParameterUtils.convertParameterPlaceholders(jsonObject,ParamUtils.convert(paramsMap)); logger.info("http request params:{}",params); httpPropertyList.add(JSONUtils.parseObject(params,HttpProperty.class)); } } addRequestParams(builder,httpPropertyList); String requestUrl = ParameterUtils.convertParameterPlaceholders(httpParameters.getUrl(),ParamUtils.convert(paramsMap)); HttpUriRequest request = builder.setUri(requestUrl).build(); setHeaders(request,httpPropertyList); return client.execute(request); } /** * get response body * @param httpResponse http response * @return response body * @throws ParseException parse exception * @throws IOException io exception */ protected String getResponseBody(CloseableHttpResponse httpResponse) throws 
ParseException, IOException { if (httpResponse == null) { return null; } HttpEntity entity = httpResponse.getEntity(); if (entity == null) { return null; } return EntityUtils.toString(entity, StandardCharsets.UTF_8.name()); } /** * get status code * @param httpResponse http response * @return status code */ protected int getStatusCode(CloseableHttpResponse httpResponse) { return httpResponse.getStatusLine().getStatusCode(); } /** * valid response * @param body body * @param statusCode status code * @return exit status code */ protected int validResponse(String body, String statusCode){ int exitStatusCode = 0; switch (httpParameters.getHttpCheckCondition()) { case BODY_CONTAINS: if (StringUtils.isEmpty(body) || !body.contains(httpParameters.getCondition())) { appendMessage(httpParameters.getUrl() + " doesn contain " + httpParameters.getCondition()); exitStatusCode = -1; } break; case BODY_NOT_CONTAINS: if (StringUtils.isEmpty(body) || body.contains(httpParameters.getCondition())) { appendMessage(httpParameters.getUrl() + " contains " + httpParameters.getCondition()); exitStatusCode = -1; } break; case STATUS_CODE_CUSTOM: if (!statusCode.equals(httpParameters.getCondition())) { appendMessage(httpParameters.getUrl() + " statuscode: " + statusCode + ", Must be: " + httpParameters.getCondition()); exitStatusCode = -1; } break; default: if (!"200".equals(statusCode)) { appendMessage(httpParameters.getUrl() + " statuscode: " + statusCode + ", Must be: 200"); exitStatusCode = -1; } break; } return exitStatusCode; } public String getOutput() { return output; } /** * append message * @param message message */ protected void appendMessage(String message) { if (output == null) { output = ""; } if (message != null && !message.trim().isEmpty()) { output += message; } } /** * add request params * @param builder buidler * @param httpPropertyList http property list */ protected void addRequestParams(RequestBuilder builder,List<HttpProperty> httpPropertyList) { 
if(CollectionUtils.isNotEmpty(httpPropertyList)){ ObjectNode jsonParam = JSONUtils.createObjectNode(); for (HttpProperty property: httpPropertyList){ if(property.getHttpParametersType() != null){ if (property.getHttpParametersType().equals(HttpParametersType.PARAMETER)){ builder.addParameter(property.getProp(), property.getValue()); }else if(property.getHttpParametersType().equals(HttpParametersType.BODY)){ jsonParam.put(property.getProp(), property.getValue()); } } } StringEntity postingString = new StringEntity(jsonParam.toString(), Charsets.UTF_8); postingString.setContentEncoding(StandardCharsets.UTF_8.name()); postingString.setContentType(APPLICATION_JSON); builder.setEntity(postingString); } } /** * set headers * @param request request * @param httpPropertyList http property list */ protected void setHeaders(HttpUriRequest request,List<HttpProperty> httpPropertyList) { if(CollectionUtils.isNotEmpty(httpPropertyList)){ for (HttpProperty property: httpPropertyList) { if (HttpParametersType.HEADERS.equals(property.getHttpParametersType())) { request.addHeader(property.getProp(), property.getValue()); } } } } /** * create http client * @return CloseableHttpClient */ protected CloseableHttpClient createHttpClient() { final RequestConfig requestConfig = requestConfig(); HttpClientBuilder httpClientBuilder; httpClientBuilder = HttpClients.custom().setDefaultRequestConfig(requestConfig); return httpClientBuilder.build(); } /** * request config * @return RequestConfig */ private RequestConfig requestConfig() { return RequestConfig.custom().setSocketTimeout(httpParameters.getSocketTimeout()).setConnectTimeout(httpParameters.getConnectTimeout()).build(); } /** * create request builder * @return RequestBuilder */ protected RequestBuilder createRequestBuilder() { if (httpParameters.getHttpMethod().equals(HttpMethod.GET)) { return RequestBuilder.get(); } else if (httpParameters.getHttpMethod().equals(HttpMethod.POST)) { return RequestBuilder.post(); } else if 
(httpParameters.getHttpMethod().equals(HttpMethod.HEAD)) { return RequestBuilder.head(); } else if (httpParameters.getHttpMethod().equals(HttpMethod.PUT)) { return RequestBuilder.put(); } else if (httpParameters.getHttpMethod().equals(HttpMethod.DELETE)) { return RequestBuilder.delete(); } else { return null; } } @Override public AbstractParameters getParameters() { return this.httpParameters; } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,621
[Improvement][job instance]start-time and end-time
*For better global communication, please give priority to using English description, thx! * *Please review https://dolphinscheduler.apache.org/en-us/community/development/issue.html when describe an issue.* **Describe the question** 工作流实例列表中,如果多个工作流存在依赖关系,最后的工作流任务时长是累计时长,还有,这个排序好像只是按照开始时间排序的,希望能做成开始时间一样,按照结束时间再排一下. **What are the current deficiencies and the benefits of improvement** - A clear and concise description of the current deficiencies and the benefits of this improvement. **Which version of DolphinScheduler:** -[1.3.6] **Describe alternatives you've considered** A clear and concise description of any alternative improvement solutions you've considered.
https://github.com/apache/dolphinscheduler/issues/5621
https://github.com/apache/dolphinscheduler/pull/5797
7bffe0ac85b0147210facdeedc531026b0022e6f
70fef3daed5cb5add90dd3ada3d753533dea59ae
"2021-06-10T13:43:25Z"
java
"2021-07-12T06:36:09Z"
dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.xml
<?xml version="1.0" encoding="UTF-8" ?> <!-- ~ Licensed to the Apache Software Foundation (ASF) under one or more ~ contributor license agreements. See the NOTICE file distributed with ~ this work for additional information regarding copyright ownership. ~ The ASF licenses this file to You under the Apache License, Version 2.0 ~ (the "License"); you may not use this file except in compliance with ~ the License. You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0 ~ ~ Unless required by applicable law or agreed to in writing, software ~ distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the License for the specific language governing permissions and ~ limitations under the License. --> <!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" > <mapper namespace="org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper"> <sql id="baseSql"> id, name, process_definition_version, process_definition_code, state, recovery, start_time, end_time, run_times,host, command_type, command_param, task_depend_type, max_try_times, failure_strategy, warning_type, warning_group_id, schedule_time, command_start_time, global_params, flag, update_time, is_sub_process, executor_id, history_cmd, process_instance_priority, worker_group, timeout, tenant_id, var_pool </sql> <select id="queryDetailById" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select <include refid="baseSql"/> from t_ds_process_instance where id = #{processId} </select> <select id="queryByHostAndStatus" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select <include refid="baseSql"/> from t_ds_process_instance where 1=1 <if test="host != null and host != ''"> and host=#{host} </if> and state in <foreach collection="states" item="i" open="(" close=")" separator=","> #{i} </foreach> order by id asc 
</select> <select id="queryTopNProcessInstance" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select <include refid="baseSql"/> from t_ds_process_instance where state = #{status} and start_time between #{startTime} and #{endTime} order by end_time-start_time desc limit #{size} </select> <select id="queryByTenantIdAndStatus" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select <include refid="baseSql"/> from t_ds_process_instance where 1=1 <if test="tenantId != -1"> and tenant_id =#{tenantId} </if> and state in <foreach collection="states" item="i" open="(" close=")" separator=","> #{i} </foreach> order by id asc </select> <select id="queryByWorkerGroupNameAndStatus" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select <include refid="baseSql"/> from t_ds_process_instance where 1=1 <if test="workerGroupName != ''"> and worker_group =#{workerGroupName} </if> and state in <foreach collection="states" item="i" open="(" close=")" separator=","> #{i} </foreach> order by id asc </select> <select id="queryProcessInstanceListPaging" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select instance.id, instance.command_type, instance.executor_id, instance.process_definition_version, instance.process_definition_code, instance.name, instance.state, instance.schedule_time, instance.start_time, instance.end_time, instance.run_times, instance.recovery, instance.host from t_ds_process_instance instance join t_ds_process_definition define ON instance.process_definition_code = define.code where instance.is_sub_process=0 and define.project_code = #{projectCode} <if test="processDefinitionCode != 0"> and instance.process_definition_code = #{processDefinitionCode} </if> <if test="searchVal != null and searchVal != ''"> and instance.name like concat('%', #{searchVal}, '%') </if> <if test="startTime != null "> and instance.start_time > #{startTime} and instance.start_time <![CDATA[ <=]]> #{endTime} 
</if> <if test="states != null and states.length > 0"> and instance.state in <foreach collection="states" index="index" item="i" open="(" separator="," close=")"> #{i} </foreach> </if> <if test="host != null and host != ''"> and instance.host like concat('%', #{host}, '%') </if> <if test="executorId != 0"> and instance.executor_id = #{executorId} </if> order by instance.start_time desc </select> <update id="setFailoverByHostAndStateArray"> update t_ds_process_instance set host=null where host =#{host} and state in <foreach collection="states" index="index" item="i" open="(" close=")" separator=","> #{i} </foreach> </update> <update id="updateProcessInstanceByState"> update t_ds_process_instance set state = #{destState} where state = #{originState} </update> <update id="updateProcessInstanceByTenantId"> update t_ds_process_instance set tenant_id = #{destTenantId} where tenant_id = #{originTenantId} </update> <update id="updateProcessInstanceByWorkerGroupName"> update t_ds_process_instance set worker_group = #{destWorkerGroupName} where worker_group = #{originWorkerGroupName} </update> <select id="countInstanceStateByUser" resultType="org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount"> select t.state, count(0) as count from t_ds_process_instance t join t_ds_process_definition d on d.code=t.process_definition_code join t_ds_project p on p.code=d.project_code where 1 = 1 and t.is_sub_process = 0 <if test="startTime != null and endTime != null"> and t.start_time <![CDATA[ >= ]]> #{startTime} and t.start_time <![CDATA[ <= ]]> #{endTime} </if> <if test="projectCodes != null and projectCodes.length != 0"> and p.code in <foreach collection="projectCodes" index="index" item="i" open="(" close=")" separator=","> #{i} </foreach> </if> group by t.state </select> <select id="queryByProcessDefineCode" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select <include refid="baseSql"/> from t_ds_process_instance where 
process_definition_code=#{processDefinitionCode} order by start_time desc limit #{size} </select> <select id="queryLastSchedulerProcess" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select <include refid="baseSql"/> from t_ds_process_instance where process_definition_code=#{processDefinitionCode} <if test="startTime!=null and endTime != null "> and schedule_time <![CDATA[ >= ]]> #{startTime} and schedule_time <![CDATA[ <= ]]> #{endTime} </if> order by end_time desc limit 1 </select> <select id="queryLastRunningProcess" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select <include refid="baseSql"/> from t_ds_process_instance where process_definition_code=#{processDefinitionCode} <if test="states !=null and states.length != 0"> and state in <foreach collection="states" item="i" index="index" open="(" separator="," close=")"> #{i} </foreach> </if> <if test="startTime!=null and endTime != null "> and (schedule_time <![CDATA[ >= ]]> #{startTime} and schedule_time <![CDATA[ <= ]]> #{endTime} or start_time <![CDATA[ >= ]]> #{startTime} and start_time <![CDATA[ <= ]]> #{endTime}) </if> order by start_time desc limit 1 </select> <select id="queryLastManualProcess" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select <include refid="baseSql"/> from t_ds_process_instance where process_definition_code=#{processDefinitionCode} and schedule_time is null <if test="startTime!=null and endTime != null "> and start_time <![CDATA[ >= ]]> #{startTime} and start_time <![CDATA[ <= ]]> #{endTime} </if> order by end_time desc limit 1 </select> <select id="queryByProcessDefineCodeAndStatus" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select <include refid="baseSql"/> from t_ds_process_instance where process_definition_code=#{processDefinitionCode} and state in <foreach collection="states" item="i" open="(" close=")" separator=","> #{i} </foreach> order by id asc </select> <update 
id="updateGlobalParamsById"> update t_ds_process_instance set global_params = #{globalParams} where id = #{id} </update> </mapper>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,699
[Bug][UI] Update user error in user information
**To Reproduce** User Infomation Page-> Click Edit Buttom -> popup Edit User Dalog -> Click Edit Button to finish the edit action. **Expected behavior** Bug fixed **Screenshots** ![image](https://user-images.githubusercontent.com/52202080/121630957-c0a49b00-cab0-11eb-870b-619ecc2f5d4a.png) **Which version of Dolphin Scheduler:** -[1.3.6] -[dev]
https://github.com/apache/dolphinscheduler/issues/5699
https://github.com/apache/dolphinscheduler/pull/5735
596821a5b691622487863d1e53a147453d435ab7
00e76558be001dc72cf60f4db93c881ed98db95a
"2021-06-26T03:56:38Z"
java
"2021-07-14T05:49:21Z"
dolphinscheduler-ui/src/js/conf/home/pages/security/pages/users/index.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <m-list-construction :title="$t('User Manage')"> <template slot="conditions"> <m-conditions @on-conditions="_onConditions"> <template slot="button-group" v-if="userList.length"> <el-button size="mini" @click="_create('')">{{$t('Create User')}}</el-button> <el-dialog :title="item ? 
$t('Edit User') : $t('Create User')" v-if="createUserDialog" :visible.sync="createUserDialog" width="auto"> <m-create-user :item="item" @onUpdate="onUpdate" @close="close"></m-create-user> </el-dialog> </template> </m-conditions> </template> <template slot="content"> <template v-if="userList.length || total>0"> <m-list @on-edit="_onEdit" @on-update="_onUpdate" :user-list="userList" :page-no="searchParams.pageNo" :page-size="searchParams.pageSize"> </m-list> <div class="page-box"> <el-pagination background @current-change="_page" @size-change="_pageSize" :page-size="searchParams.pageSize" :current-page.sync="searchParams.pageNo" :page-sizes="[10, 30, 50]" layout="sizes, prev, pager, next, jumper" :total="total"> </el-pagination> </div> </template> <template v-if="!userList.length && total<=0"> <m-no-data></m-no-data> </template> <m-spin :is-spin="isLoading" :is-left="isLeft"></m-spin> </template> </m-list-construction> </template> <script> import _ from 'lodash' import { mapActions, mapMutations } from 'vuex' import mList from './_source/list' import mCreateUser from './_source/createUser' import mSpin from '@/module/components/spin/spin' import mNoData from '@/module/components/noData/noData' import listUrlParamHandle from '@/module/mixin/listUrlParamHandle' import mConditions from '@/module/components/conditions/conditions' import mListConstruction from '@/module/components/listConstruction/listConstruction' export default { name: 'users-index', data () { return { total: null, isLoading: true, userList: [], searchParams: { pageSize: 10, pageNo: 1, searchVal: '' }, isLeft: true, createUserDialog: false, item: {} } }, mixins: [listUrlParamHandle], props: {}, methods: { ...mapMutations('user', ['setUserInfo']), ...mapActions('security', ['getUsersListP']), /** * Query */ _onConditions (o) { this.searchParams = _.assign(this.searchParams, o) this.searchParams.pageNo = 1 }, _page (val) { this.searchParams.pageNo = val }, _pageSize (val) { this.searchParams.pageSize = 
val }, _onUpdate () { this._debounceGET() }, _onEdit (item) { this._create(item) }, _create (item) { this.item = item this.createUserDialog = true }, onUpdate (param) { this._debounceGET('false') this.setUserInfo(param) this.createUserDialog = false }, close () { this.createUserDialog = false }, _getList (flag) { if (sessionStorage.getItem('isLeft') === 0) { this.isLeft = false } else { this.isLeft = true } this.isLoading = !flag this.getUsersListP(this.searchParams).then(res => { if (this.searchParams.pageNo > 1 && res.totalList.length === 0) { this.searchParams.pageNo = this.searchParams.pageNo - 1 } else { this.userList = [] this.userList = res.totalList this.total = res.total this.isLoading = false } }).catch(e => { this.isLoading = false }) } }, watch: { // router '$route' (a) { // url no params get instance list this.searchParams.pageNo = _.isEmpty(a.query) ? 1 : a.query.pageNo } }, created () { }, mounted () { }, beforeDestroy () { sessionStorage.setItem('isLeft', 1) }, components: { mList, mListConstruction, mConditions, mSpin, mNoData, mCreateUser } } </script>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,726
[Improvement][UI] When we used the UI page, we found some problems such as parameter validation, parameter update shows success but actually work
### 1.When editing an existing data source, the data source type does not actually support switching, preferably not editing ![image](https://user-images.githubusercontent.com/52202080/123957653-a3435c80-d9de-11eb-8097-f14fc890586c.png) ![image](https://user-images.githubusercontent.com/52202080/123956430-46937200-d9dd-11eb-8bec-dc394ad0c654.png) ### 2.When editing an existing data source, the name of the existing data source is changed to show success, but does not actually take effect. ### 3.When a token is created, the expiration time can be set to empty, result in an illegal parameter and it can create success: ![image](https://user-images.githubusercontent.com/52202080/123960033-6af14d80-d9e1-11eb-87d6-1438962a8245.png) ![image](https://user-images.githubusercontent.com/52202080/123960120-82c8d180-d9e1-11eb-8df0-dc3598a6a0aa.png) ### 4.Non-Admin user profile page does not display tenant information ![image](https://user-images.githubusercontent.com/52202080/123957087-f8cb3980-d9dd-11eb-9b8f-ee349ee7eb91.png) ![image](https://user-images.githubusercontent.com/52202080/123957281-2d3ef580-d9de-11eb-9813-ef5c8311994d.png)
https://github.com/apache/dolphinscheduler/issues/5726
https://github.com/apache/dolphinscheduler/pull/5727
00e76558be001dc72cf60f4db93c881ed98db95a
1f0c67bfb772f46a0e7d6289b13a499aae403fe3
"2021-06-30T12:28:11Z"
java
"2021-07-14T05:51:10Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service.impl; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.DataSourceService; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam; import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO; import org.apache.dolphinscheduler.common.datasource.ConnectionParam; import org.apache.dolphinscheduler.common.datasource.DatasourceUtil; import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper; import java.sql.Connection; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; 
import java.util.Set; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DuplicateKeyException; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import com.fasterxml.jackson.databind.node.ObjectNode; /** * data source service impl */ @Service public class DataSourceServiceImpl extends BaseServiceImpl implements DataSourceService { private static final Logger logger = LoggerFactory.getLogger(DataSourceServiceImpl.class); @Autowired private DataSourceMapper dataSourceMapper; @Autowired private DataSourceUserMapper datasourceUserMapper; /** * create data source * * @param loginUser login user * @param datasourceParam datasource parameters * @return create result code */ @Override public Result<Object> createDataSource(User loginUser, BaseDataSourceParamDTO datasourceParam) { DatasourceUtil.checkDatasourceParam(datasourceParam); Result<Object> result = new Result<>(); // check name can use or not if (checkName(datasourceParam.getName())) { putMsg(result, Status.DATASOURCE_EXIST); return result; } // check connect ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(datasourceParam); Result<Object> isConnection = checkConnection(datasourceParam.getType(), connectionParam); if (Status.SUCCESS.getCode() != isConnection.getCode()) { putMsg(result, Status.DATASOURCE_CONNECT_FAILED); return result; } // build datasource DataSource dataSource = new DataSource(); Date now = new Date(); dataSource.setName(datasourceParam.getName().trim()); dataSource.setNote(datasourceParam.getNote()); dataSource.setUserId(loginUser.getId()); dataSource.setUserName(loginUser.getUserName()); dataSource.setType(datasourceParam.getType()); 
dataSource.setConnectionParams(JSONUtils.toJsonString(connectionParam)); dataSource.setCreateTime(now); dataSource.setUpdateTime(now); try { dataSourceMapper.insert(dataSource); putMsg(result, Status.SUCCESS); } catch (DuplicateKeyException ex) { logger.error("Create datasource error.", ex); putMsg(result, Status.DATASOURCE_EXIST); } return result; } /** * updateProcessInstance datasource * * @param loginUser login user * @param name data source name * @param desc data source description * @param type data source type * @param parameter datasource parameters * @param id data source id * @return update result code */ @Override public Result<Object> updateDataSource(int id, User loginUser, BaseDataSourceParamDTO dataSourceParam) { DatasourceUtil.checkDatasourceParam(dataSourceParam); Result<Object> result = new Result<>(); // determine whether the data source exists DataSource dataSource = dataSourceMapper.selectById(id); if (dataSource == null) { putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } if (!hasPerm(loginUser, dataSource.getUserId())) { putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } //check name can use or not if (!dataSource.getName().trim().equals(dataSource.getName()) && checkName(dataSource.getName())) { putMsg(result, Status.DATASOURCE_EXIST); return result; } //check password,if the password is not updated, set to the old password. 
BaseConnectionParam connectionParam = (BaseConnectionParam) DatasourceUtil.buildConnectionParams(dataSourceParam); String password = connectionParam.getPassword(); if (StringUtils.isBlank(password)) { String oldConnectionParams = dataSource.getConnectionParams(); ObjectNode oldParams = JSONUtils.parseObject(oldConnectionParams); connectionParam.setPassword(oldParams.path(Constants.PASSWORD).asText()); } Result<Object> isConnection = checkConnection(dataSource.getType(), connectionParam); if (isConnection.isFailed()) { return isConnection; } Date now = new Date(); dataSource.setName(dataSource.getName().trim()); dataSource.setNote(dataSourceParam.getNote()); dataSource.setUserName(loginUser.getUserName()); dataSource.setType(dataSource.getType()); dataSource.setConnectionParams(JSONUtils.toJsonString(connectionParam)); dataSource.setUpdateTime(now); try { dataSourceMapper.updateById(dataSource); putMsg(result, Status.SUCCESS); } catch (DuplicateKeyException ex) { logger.error("Update datasource error.", ex); putMsg(result, Status.DATASOURCE_EXIST); } return result; } private boolean checkName(String name) { List<DataSource> queryDataSource = dataSourceMapper.queryDataSourceByName(name.trim()); return queryDataSource != null && !queryDataSource.isEmpty(); } /** * updateProcessInstance datasource * * @param id datasource id * @return data source detail */ @Override public Map<String, Object> queryDataSource(int id) { Map<String, Object> result = new HashMap<>(); DataSource dataSource = dataSourceMapper.selectById(id); if (dataSource == null) { putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } // type BaseDataSourceParamDTO baseDataSourceParamDTO = DatasourceUtil.buildDatasourceParamDTO( dataSource.getType(), dataSource.getConnectionParams()); baseDataSourceParamDTO.setId(dataSource.getId()); baseDataSourceParamDTO.setName(dataSource.getName()); baseDataSourceParamDTO.setNote(dataSource.getNote()); result.put(Constants.DATA_LIST, baseDataSourceParamDTO); 
putMsg(result, Status.SUCCESS); return result; } /** * query datasource list by keyword * * @param loginUser login user * @param searchVal search value * @param pageNo page number * @param pageSize page size * @return data source list page */ @Override public Map<String, Object> queryDataSourceListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { Map<String, Object> result = new HashMap<>(); IPage<DataSource> dataSourceList; Page<DataSource> dataSourcePage = new Page<>(pageNo, pageSize); if (isAdmin(loginUser)) { dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, 0, searchVal); } else { dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, loginUser.getId(), searchVal); } List<DataSource> dataSources = dataSourceList != null ? dataSourceList.getRecords() : new ArrayList<>(); handlePasswd(dataSources); PageInfo<DataSource> pageInfo = new PageInfo<>(pageNo, pageSize); pageInfo.setTotalCount((int) (dataSourceList != null ? dataSourceList.getTotal() : 0L)); pageInfo.setLists(dataSources); result.put(Constants.DATA_LIST, pageInfo); putMsg(result, Status.SUCCESS); return result; } /** * handle datasource connection password for safety */ private void handlePasswd(List<DataSource> dataSourceList) { for (DataSource dataSource : dataSourceList) { String connectionParams = dataSource.getConnectionParams(); ObjectNode object = JSONUtils.parseObject(connectionParams); object.put(Constants.PASSWORD, getHiddenPassword()); dataSource.setConnectionParams(object.toString()); } } /** * get hidden password (resolve the security hotspot) * * @return hidden password */ private String getHiddenPassword() { return Constants.XXXXXX; } /** * query data resource list * * @param loginUser login user * @param type data source type * @return data source list page */ @Override public Map<String, Object> queryDataSourceList(User loginUser, Integer type) { Map<String, Object> result = new HashMap<>(); List<DataSource> datasourceList; if 
(isAdmin(loginUser)) { datasourceList = dataSourceMapper.listAllDataSourceByType(type); } else { datasourceList = dataSourceMapper.queryDataSourceByType(loginUser.getId(), type); } result.put(Constants.DATA_LIST, datasourceList); putMsg(result, Status.SUCCESS); return result; } /** * verify datasource exists * * @param name datasource name * @return true if data datasource not exists, otherwise return false */ @Override public Result<Object> verifyDataSourceName(String name) { Result<Object> result = new Result<>(); List<DataSource> dataSourceList = dataSourceMapper.queryDataSourceByName(name); if (dataSourceList != null && !dataSourceList.isEmpty()) { putMsg(result, Status.DATASOURCE_EXIST); } else { putMsg(result, Status.SUCCESS); } return result; } /** * check connection * * @param type data source type * @param parameter data source parameters * @return true if connect successfully, otherwise false */ @Override public Result<Object> checkConnection(DbType type, ConnectionParam connectionParam) { Result<Object> result = new Result<>(); try (Connection connection = DatasourceUtil.getConnection(type, connectionParam)) { if (connection == null) { putMsg(result, Status.CONNECTION_TEST_FAILURE); return result; } putMsg(result, Status.SUCCESS); return result; } catch (Exception e) { logger.error("datasource test connection error, dbType:{}, connectionParam:{}, message:{}.", type, connectionParam, e.getMessage()); return new Result<>(Status.CONNECTION_TEST_FAILURE.getCode(), e.getMessage()); } } /** * test connection * * @param id datasource id * @return connect result code */ @Override public Result<Object> connectionTest(int id) { DataSource dataSource = dataSourceMapper.selectById(id); if (dataSource == null) { Result<Object> result = new Result<>(); putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } return checkConnection(dataSource.getType(), DatasourceUtil.buildConnectionParams(dataSource.getType(), dataSource.getConnectionParams())); } /** * delete 
datasource * * @param loginUser login user * @param datasourceId data source id * @return delete result code */ @Override @Transactional(rollbackFor = RuntimeException.class) public Result<Object> delete(User loginUser, int datasourceId) { Result<Object> result = new Result<>(); try { //query datasource by id DataSource dataSource = dataSourceMapper.selectById(datasourceId); if (dataSource == null) { logger.error("resource id {} not exist", datasourceId); putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } if (!hasPerm(loginUser, dataSource.getUserId())) { putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } dataSourceMapper.deleteById(datasourceId); datasourceUserMapper.deleteByDatasourceId(datasourceId); putMsg(result, Status.SUCCESS); } catch (Exception e) { logger.error("delete datasource error", e); throw new RuntimeException("delete datasource error"); } return result; } /** * unauthorized datasource * * @param loginUser login user * @param userId user id * @return unauthed data source result code */ @Override public Map<String, Object> unauthDatasource(User loginUser, Integer userId) { Map<String, Object> result = new HashMap<>(); //only admin operate if (!isAdmin(loginUser)) { putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } /** * query all data sources except userId */ List<DataSource> resultList = new ArrayList<>(); List<DataSource> datasourceList = dataSourceMapper.queryDatasourceExceptUserId(userId); Set<DataSource> datasourceSet = null; if (datasourceList != null && !datasourceList.isEmpty()) { datasourceSet = new HashSet<>(datasourceList); List<DataSource> authedDataSourceList = dataSourceMapper.queryAuthedDatasource(userId); Set<DataSource> authedDataSourceSet = null; if (authedDataSourceList != null && !authedDataSourceList.isEmpty()) { authedDataSourceSet = new HashSet<>(authedDataSourceList); datasourceSet.removeAll(authedDataSourceSet); } resultList = new ArrayList<>(datasourceSet); } 
result.put(Constants.DATA_LIST, resultList); putMsg(result, Status.SUCCESS); return result; } /** * authorized datasource * * @param loginUser login user * @param userId user id * @return authorized result code */ @Override public Map<String, Object> authedDatasource(User loginUser, Integer userId) { Map<String, Object> result = new HashMap<>(); if (!isAdmin(loginUser)) { putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } List<DataSource> authedDatasourceList = dataSourceMapper.queryAuthedDatasource(userId); result.put(Constants.DATA_LIST, authedDatasourceList); putMsg(result, Status.SUCCESS); return result; } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,726
[Improvement][UI] When we used the UI page, we found some problems such as parameter validation, parameter update shows success but actually work
### 1.When editing an existing data source, the data source type does not actually support switching, preferably not editing ![image](https://user-images.githubusercontent.com/52202080/123957653-a3435c80-d9de-11eb-8097-f14fc890586c.png) ![image](https://user-images.githubusercontent.com/52202080/123956430-46937200-d9dd-11eb-8bec-dc394ad0c654.png) ### 2.When editing an existing data source, the name of the existing data source is changed to show success, but does not actually take effect. ### 3.When a token is created, the expiration time can be set to empty, result in an illegal parameter and it can create success: ![image](https://user-images.githubusercontent.com/52202080/123960033-6af14d80-d9e1-11eb-87d6-1438962a8245.png) ![image](https://user-images.githubusercontent.com/52202080/123960120-82c8d180-d9e1-11eb-8df0-dc3598a6a0aa.png) ### 4.Non-Admin user profile page does not display tenant information ![image](https://user-images.githubusercontent.com/52202080/123957087-f8cb3980-d9dd-11eb-9b8f-ee349ee7eb91.png) ![image](https://user-images.githubusercontent.com/52202080/123957281-2d3ef580-d9de-11eb-9813-ef5c8311994d.png)
https://github.com/apache/dolphinscheduler/issues/5726
https://github.com/apache/dolphinscheduler/pull/5727
00e76558be001dc72cf60f4db93c881ed98db95a
1f0c67bfb772f46a0e7d6289b13a499aae403fe3
"2021-06-30T12:28:11Z"
java
"2021-07-14T05:51:10Z"
dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UserMapper.xml
<?xml version="1.0" encoding="UTF-8" ?> <!-- ~ Licensed to the Apache Software Foundation (ASF) under one or more ~ contributor license agreements. See the NOTICE file distributed with ~ this work for additional information regarding copyright ownership. ~ The ASF licenses this file to You under the Apache License, Version 2.0 ~ (the "License"); you may not use this file except in compliance with ~ the License. You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0 ~ ~ Unless required by applicable law or agreed to in writing, software ~ distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the License for the specific language governing permissions and ~ limitations under the License. --> <!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" > <mapper namespace="org.apache.dolphinscheduler.dao.mapper.UserMapper"> <sql id="baseSql"> id, user_name, user_password, user_type, email, phone, tenant_id, create_time, update_time, queue, state </sql> <sql id="baseSqlV2"> ${alias}.id, ${alias}.user_name, ${alias}.user_password, ${alias}.user_type, ${alias}.email, ${alias}.phone, ${alias}.tenant_id, ${alias}.create_time, ${alias}.update_time, ${alias}.queue, ${alias}.state </sql> <select id="queryAllGeneralUser" resultType="org.apache.dolphinscheduler.dao.entity.User"> select <include refid="baseSql"/> from t_ds_user where user_type=1; </select> <select id="queryByUserNameAccurately" resultType="org.apache.dolphinscheduler.dao.entity.User"> select <include refid="baseSql"/> from t_ds_user where user_name=#{userName} </select> <select id="queryUserByNamePassword" resultType="org.apache.dolphinscheduler.dao.entity.User"> select <include refid="baseSql"/> from t_ds_user where user_name=#{userName} and user_password = #{password} </select> <select id="queryUserPaging" 
resultType="org.apache.dolphinscheduler.dao.entity.User"> select u.id,u.user_name,u.user_password,u.user_type,u.email,u.phone,u.tenant_id,u.create_time, u.update_time,t.tenant_code,u.state, case when u.queue <![CDATA[ <> ]]> '' then u.queue else q.queue_name end as queue, q.queue_name from t_ds_user u left join t_ds_tenant t on u.tenant_id=t.id left join t_ds_queue q on t.queue_id = q.id where 1=1 <if test="userName!=null and userName != ''"> and u.user_name like concat ('%', #{userName}, '%') </if> order by u.update_time desc </select> <select id="queryDetailsById" resultType="org.apache.dolphinscheduler.dao.entity.User"> select <include refid="baseSqlV2"> <property name="alias" value="u"/> </include> , case when u.queue <![CDATA[ <> ]]> '' then u.queue else q.queue_name end as queue_name from t_ds_user u left join t_ds_tenant t on u.tenant_id=t.id left join t_ds_queue q on t.queue_id = q.id WHERE u.id = #{userId} </select> <select id="queryUserListByAlertGroupId" resultType="org.apache.dolphinscheduler.dao.entity.User"> select <include refid="baseSqlV2"> <property name="alias" value="u"/> </include> from t_ds_user u, t_ds_relation_user_alertgroup rel where u.id = rel.user_id AND rel.alertgroup_id = #{alertgroupId} </select> <select id="queryUserListByTenant" resultType="org.apache.dolphinscheduler.dao.entity.User"> select <include refid="baseSql"/> from t_ds_user where tenant_id = #{tenantId} </select> <select id="queryTenantCodeByUserId" resultType="org.apache.dolphinscheduler.dao.entity.User"> SELECT <include refid="baseSqlV2"> <property name="alias" value="u"/> </include> , t.tenant_code FROM t_ds_user u, t_ds_tenant t WHERE u.tenant_id = t.id AND u.id = #{userId} </select> <select id="queryUserByToken" resultType="org.apache.dolphinscheduler.dao.entity.User"> select <include refid="baseSqlV2"> <property name="alias" value="u"/> </include> from t_ds_user u ,t_ds_access_token t where u.id = t.user_id and token=#{token} and t.expire_time > NOW() </select> 
<select id="queryUserListByQueue" resultType="org.apache.dolphinscheduler.dao.entity.User"> select <include refid="baseSql"/> from t_ds_user where queue = #{queue} </select> <select id="existUser" resultType="java.lang.Boolean"> select 1 from t_ds_user where queue = #{queue} limit 1 </select> <update id="updateUserQueue" parameterType="java.lang.String"> update t_ds_user set queue = #{newQueue} where queue = #{oldQueue} </update> <select id="selectByIds" resultType="org.apache.dolphinscheduler.dao.entity.User"> select * from t_ds_user where id in <foreach item="id" index="index" collection="ids" open="(" separator="," close=")"> #{id} </foreach> </select> </mapper>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,726
[Improvement][UI] When we used the UI page, we found some problems such as parameter validation, parameter update shows success but actually work
### 1.When editing an existing data source, the data source type does not actually support switching, preferably not editing ![image](https://user-images.githubusercontent.com/52202080/123957653-a3435c80-d9de-11eb-8097-f14fc890586c.png) ![image](https://user-images.githubusercontent.com/52202080/123956430-46937200-d9dd-11eb-8bec-dc394ad0c654.png) ### 2.When editing an existing data source, the name of the existing data source is changed to show success, but does not actually take effect. ### 3.When a token is created, the expiration time can be set to empty, result in an illegal parameter and it can create success: ![image](https://user-images.githubusercontent.com/52202080/123960033-6af14d80-d9e1-11eb-87d6-1438962a8245.png) ![image](https://user-images.githubusercontent.com/52202080/123960120-82c8d180-d9e1-11eb-8df0-dc3598a6a0aa.png) ### 4.Non-Admin user profile page does not display tenant information ![image](https://user-images.githubusercontent.com/52202080/123957087-f8cb3980-d9dd-11eb-9b8f-ee349ee7eb91.png) ![image](https://user-images.githubusercontent.com/52202080/123957281-2d3ef580-d9de-11eb-9813-ef5c8311994d.png)
https://github.com/apache/dolphinscheduler/issues/5726
https://github.com/apache/dolphinscheduler/pull/5727
00e76558be001dc72cf60f4db93c881ed98db95a
1f0c67bfb772f46a0e7d6289b13a499aae403fe3
"2021-06-30T12:28:11Z"
java
"2021-07-14T05:51:10Z"
dolphinscheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/createDataSource.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <div class="datasource-popup-model"> <div class="content-p"> <div class="create-datasource-model"> <m-list-box-f> <template slot="name"><strong>*</strong>{{$t('Datasource')}}</template> <template slot="content" size="small"> <el-select style="width: 100%;" v-model="type"> <el-option v-for="item in datasourceTypeList" :key="item.value" :value="item.value" :label="item.label"> </el-option> </el-select> </template> </m-list-box-f> <m-list-box-f> <template slot="name"><strong>*</strong>{{$t('Datasource Name')}}</template> <template slot="content"> <el-input type="input" v-model="name" maxlength="60" size="small" :placeholder="$t('Please enter datasource name')"> </el-input> </template> </m-list-box-f> <m-list-box-f> <template slot="name">{{$t('Description')}}</template> <template slot="content"> <el-input type="textarea" v-model="note" size="small" :placeholder="$t('Please enter description')"> </el-input> </template> </m-list-box-f> <m-list-box-f> <template slot="name"><strong>*</strong>{{$t('IP')}}</template> <template slot="content"> <el-input type="input" v-model="host" maxlength="60" size="small" :placeholder="$t('Please enter IP')"> </el-input> </template> </m-list-box-f> <m-list-box-f> 
<template slot="name"><strong>*</strong>{{$t('Port')}}</template> <template slot="content"> <el-input type="input" v-model="port" size="small" :placeholder="$t('Please enter port')"> </el-input> </template> </m-list-box-f> <m-list-box-f :class="{hidden:showPrincipal}"> <template slot="name"><strong>*</strong>Principal</template> <template slot="content"> <el-input type="input" v-model="principal" size="small" :placeholder="$t('Please enter Principal')"> </el-input> </template> </m-list-box-f> <m-list-box-f :class="{hidden:showPrincipal}"> <template slot="name">krb5.conf</template> <template slot="content"> <el-input type="input" v-model="javaSecurityKrb5Conf" size="small" :placeholder="$t('Please enter the kerberos authentication parameter java.security.krb5.conf')"> </el-input> </template> </m-list-box-f> <m-list-box-f :class="{hidden:showPrincipal}"> <template slot="name">keytab.username</template> <template slot="content"> <el-input type="input" v-model="loginUserKeytabUsername" size="small" :placeholder="$t('Please enter the kerberos authentication parameter login.user.keytab.username')"> </el-input> </template> </m-list-box-f> <m-list-box-f :class="{hidden:showPrincipal}"> <template slot="name">keytab.path</template> <template slot="content"> <el-input type="input" v-model="loginUserKeytabPath" size="small" :placeholder="$t('Please enter the kerberos authentication parameter login.user.keytab.path')"> </el-input> </template> </m-list-box-f> <m-list-box-f> <template slot="name"><strong>*</strong>{{$t('User Name')}}</template> <template slot="content"> <el-input type="input" v-model="userName" maxlength="60" size="small" :placeholder="$t('Please enter user name')"> </el-input> </template> </m-list-box-f> <m-list-box-f> <template slot="name">{{$t('Password')}}</template> <template slot="content"> <el-input type="password" v-model="password" size="small" :placeholder="$t('Please enter your password')"> </el-input> </template> </m-list-box-f> <m-list-box-f> 
<template slot="name"><strong :class="{hidden:showDatabase}">*</strong>{{$t('Database Name')}}</template> <template slot="content"> <el-input type="input" v-model="database" maxlength="60" size="small" :placeholder="$t('Please enter database name')"> </el-input> </template> </m-list-box-f> <m-list-box-f v-if="showConnectType"> <template slot="name"><strong>*</strong>{{$t('Oracle Connect Type')}}</template> <template slot="content"> <el-radio-group v-model="connectType" size="small" style="vertical-align: sub;"> <el-radio :label="'ORACLE_SERVICE_NAME'">{{$t('Oracle Service Name')}}</el-radio> <el-radio :label="'ORACLE_SID'">{{$t('Oracle SID')}}</el-radio> </el-radio-group> </template> </m-list-box-f> <m-list-box-f> <template slot="name">{{$t('jdbc connect parameters')}}</template> <template slot="content"> <el-input type="textarea" v-model="other" :autosize="{minRows:2}" size="small" :placeholder="_rtOtherPlaceholder()"> </el-input> </template> </m-list-box-f> </div> </div> <div class="bottom-p"> <el-button type="text" ize="mini" @click="_close()"> {{$t('Cancel')}} </el-button> <el-button type="success" size="mini" round @click="_testConnect()" :loading="testLoading">{{testLoading ? 'Loading...' : $t('Test Connect')}}</el-button> <el-button type="primary" size="mini" round :loading="spinnerLoading" @click="_ok()">{{spinnerLoading ? 'Loading...' :item ? 
`${$t('Edit')}` : `${$t('Submit')}`}} </el-button> </div> </div> </template> <script> import i18n from '@/module/i18n' import store from '@/conf/home/store' import { isJson } from '@/module/util/util' import mListBoxF from '@/module/components/listBoxF/listBoxF' export default { name: 'create-datasource', data () { return { store, // btn loading spinnerLoading: false, // Data source type type: 'MYSQL', // name name: '', // description note: '', // host host: '', // port port: '', // data storage name database: '', // principal principal: '', // java.security.krb5.conf javaSecurityKrb5Conf: '', // login.user.keytab.username loginUserKeytabUsername: '', // login.user.keytab.path loginUserKeytabPath: '', // database username userName: '', // Database password password: '', // Database connect type connectType: '', // Jdbc connection parameter other: '', // btn test loading testLoading: false, showPrincipal: true, showDatabase: false, showConnectType: false, isShowPrincipal: true, prePortMapper: {}, datasourceTypeList: [ { value: 'MYSQL', label: 'MYSQL' }, { value: 'POSTGRESQL', label: 'POSTGRESQL' }, { value: 'HIVE', label: 'HIVE/IMPALA' }, { value: 'SPARK', label: 'SPARK' }, { value: 'CLICKHOUSE', label: 'CLICKHOUSE' }, { value: 'ORACLE', label: 'ORACLE' }, { value: 'SQLSERVER', label: 'SQLSERVER' }, { value: 'DB2', label: 'DB2' }, { value: 'PRESTO', label: 'PRESTO' } ] } }, props: { item: Object }, methods: { _rtOtherPlaceholder () { return `${i18n.$t('Please enter format')} {"key1":"value1","key2":"value2"...} ${i18n.$t('connection parameter')}` }, /** * submit */ _ok () { if (this._verification()) { this._verifName().then(res => { this._submit() }) } }, /** * close */ _close () { this.$emit('close') }, /** * return param */ _rtParam () { return { type: this.type, name: this.name, note: this.note, host: this.host, port: this.port, database: this.database, principal: this.principal, javaSecurityKrb5Conf: this.javaSecurityKrb5Conf, loginUserKeytabUsername: 
this.loginUserKeytabUsername, loginUserKeytabPath: this.loginUserKeytabPath, userName: this.userName, password: this.password, connectType: this.connectType, other: this.other === '' ? null : JSON.parse(this.other) } }, /** * test connect */ _testConnect () { if (this._verification()) { this.testLoading = true this.store.dispatch('datasource/connectDatasources', this._rtParam()).then(res => { setTimeout(() => { this.$message.success(res.msg) this.testLoading = false }, 800) }).catch(e => { this.$message.error(e.msg || '') this.testLoading = false }) } }, /** * Verify that the data source name exists */ _verifName () { return new Promise((resolve, reject) => { if (this.name === this.item.name) { resolve() return } this.store.dispatch('datasource/verifyName', { name: this.name }).then(res => { resolve() }).catch(e => { this.$message.error(e.msg || '') reject(e) }) }) }, /** * verification */ _verification () { if (!this.name) { this.$message.warning(`${i18n.$t('Please enter resource name')}`) return false } if (!this.host) { this.$message.warning(`${i18n.$t('Please enter IP/hostname')}`) return false } if (!this.port) { this.$message.warning(`${i18n.$t('Please enter port')}`) return false } if (!this.userName) { this.$message.warning(`${i18n.$t('Please enter user name')}`) return false } if (!this.database && this.showDatabase === false) { this.$message.warning(`${i18n.$t('Please enter database name')}`) return false } if (this.other) { if (!isJson(this.other)) { this.$message.warning(`${i18n.$t('jdbc connection parameters is not a correct JSON format')}`) return false } } return true }, /** * submit => add/update */ _submit () { this.spinnerLoading = true let param = this._rtParam() // edit if (this.item) { param.id = this.item.id } this.store.dispatch(`datasource/${this.item ? 
'updateDatasource' : 'createDatasources'}`, param).then(res => { this.$message.success(res.msg) this.spinnerLoading = false this.$emit('onUpdate') }).catch(e => { this.$message.error(e.msg || '') this.spinnerLoading = false }) }, /** * Get modified data */ _getEditDatasource () { this.store.dispatch('datasource/getEditDatasource', { id: this.item.id }).then(res => { this.type = res.type this.name = res.name this.note = res.note this.host = res.host // When in Editpage, Prevent default value overwrite backfill value setTimeout(() => { this.port = res.port }, 0) this.principal = res.principal this.javaSecurityKrb5Conf = res.javaSecurityKrb5Conf this.loginUserKeytabUsername = res.loginUserKeytabUsername this.loginUserKeytabPath = res.loginUserKeytabPath this.database = res.database this.userName = res.userName this.password = res.password this.connectType = res.connectType this.other = res.other === null ? '' : JSON.stringify(res.other) }).catch(e => { this.$message.error(e.msg || '') }) }, /** * Set default port for each type. 
*/ _setDefaultValues (value) { // Default type is MYSQL let type = this.type || 'MYSQL' let defaultPort = this._getDefaultPort(type) // Backfill the previous input from memcache let mapperPort = this.prePortMapper[type] this.port = mapperPort || defaultPort }, /** * Get default port by type */ _getDefaultPort (type) { let defaultPort = '' switch (type) { case 'MYSQL': defaultPort = '3306' break case 'POSTGRESQL': defaultPort = '5432' break case 'HIVE': defaultPort = '10000' break case 'SPARK': defaultPort = '10015' break case 'CLICKHOUSE': defaultPort = '8123' break case 'ORACLE': defaultPort = '1521' break case 'SQLSERVER': defaultPort = '1433' break case 'DB2': defaultPort = '50000' break case 'PRESTO': defaultPort = '8080' break default: break } return defaultPort } }, created () { // Backfill if (this.item.id) { this._getEditDatasource() } this._setDefaultValues() }, watch: { type (value) { if (value === 'POSTGRESQL') { this.showDatabase = true } else { this.showDatabase = false } if (value === 'ORACLE' && !this.item.id) { this.showConnectType = true this.connectType = 'ORACLE_SERVICE_NAME' } else if (value === 'ORACLE' && this.item.id) { this.showConnectType = true } else { this.showConnectType = false } // Set default port for each type datasource this._setDefaultValues(value) return new Promise((resolve, reject) => { this.store.dispatch('datasource/getKerberosStartupState').then(res => { this.isShowPrincipal = res if ((value === 'HIVE' || value === 'SPARK') && this.isShowPrincipal === true) { this.showPrincipal = false } else { this.showPrincipal = true } }).catch(e => { this.$message.error(e.msg || '') reject(e) }) }) }, /** * Cache the previous input port for each type datasource * @param value */ port (value) { this.prePortMapper[this.type] = value } }, mounted () { }, components: { mListBoxF } } </script> <style lang="scss" rel="stylesheet/scss"> .datasource-popup-model { background: #fff; border-radius: 3px; .top-p { height: 70px; line-height: 70px; 
border-radius: 3px 3px 0 0; padding: 0 20px; >span { font-size: 20px; } } .bottom-p { text-align: right; height: 72px; line-height: 72px; border-radius: 0 0 3px 3px; padding: 0 20px; } .content-p { min-width: 850px; min-height: 100px; .list-box-f { .text { width: 166px; } .cont { width: calc(100% - 186px); } } } .radio-label-last { margin-left: 0px !important; } } </style>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,726
[Improvement][UI] When we used the UI page, we found some problems such as parameter validation, parameter update shows success but actually work
### 1.When editing an existing data source, the data source type does not actually support switching, preferably not editing ![image](https://user-images.githubusercontent.com/52202080/123957653-a3435c80-d9de-11eb-8097-f14fc890586c.png) ![image](https://user-images.githubusercontent.com/52202080/123956430-46937200-d9dd-11eb-8bec-dc394ad0c654.png) ### 2.When editing an existing data source, the name of the existing data source is changed to show success, but does not actually take effect. ### 3.When a token is created, the expiration time can be set to empty, result in an illegal parameter and it can create success: ![image](https://user-images.githubusercontent.com/52202080/123960033-6af14d80-d9e1-11eb-87d6-1438962a8245.png) ![image](https://user-images.githubusercontent.com/52202080/123960120-82c8d180-d9e1-11eb-8df0-dc3598a6a0aa.png) ### 4.Non-Admin user profile page does not display tenant information ![image](https://user-images.githubusercontent.com/52202080/123957087-f8cb3980-d9dd-11eb-9b8f-ee349ee7eb91.png) ![image](https://user-images.githubusercontent.com/52202080/123957281-2d3ef580-d9de-11eb-9813-ef5c8311994d.png)
https://github.com/apache/dolphinscheduler/issues/5726
https://github.com/apache/dolphinscheduler/pull/5727
00e76558be001dc72cf60f4db93c881ed98db95a
1f0c67bfb772f46a0e7d6289b13a499aae403fe3
"2021-06-30T12:28:11Z"
java
"2021-07-14T05:51:10Z"
dolphinscheduler-ui/src/js/conf/home/pages/security/pages/warningGroups/_source/createWarning.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <m-popover ref="popover" :ok-text="item ? $t('Edit') : $t('Submit')" @ok="_ok" @close="close"> <template slot="content"> <div class="create-warning-model"> <m-list-box-f> <template slot="name"><strong>*</strong>{{$t('Group Name')}}</template> <template slot="content"> <el-input type="input" v-model="groupName" maxlength="60" size="small" :placeholder="$t('Please enter group name')"> </el-input> </template> </m-list-box-f> <m-list-box-f> <template slot="name"><strong>*</strong>{{$t('Alarm plugin instance')}}</template> <template slot="content"> <el-select v-model="alertInstanceIds" size="small" style="width: 100%" multiple> <el-option v-for="items in allAlertPluginInstance" :key="items.id" :value="items.id" :label="items.instanceName"> </el-option> </el-select> </template> </m-list-box-f> <m-list-box-f> <template slot="name">{{$t('Remarks')}}</template> <template slot="content"> <el-input type="textarea" v-model="description" size="small" :placeholder="$t('Please enter description')"> </el-input> </template> </m-list-box-f> </div> </template> </m-popover> </template> <script> import _ from 'lodash' import i18n from '@/module/i18n' import store from '@/conf/home/store' import mPopover from 
'@/module/components/popup/popover' import mListBoxF from '@/module/components/listBoxF/listBoxF' export default { name: 'create-warning', data () { return { store, groupName: '', alertInstanceIds: [], description: '' } }, props: { item: Object, allAlertPluginInstance: Array }, methods: { _ok () { if (this._verification()) { // The name is not verified if (this.item && this.item.groupName === this.groupName) { this._submit() return } // Verify username this.store.dispatch('security/verifyName', { type: 'alertgroup', groupName: this.groupName }).then(res => { this._submit() }).catch(e => { this.$message.error(e.msg || '') }) } }, _verification () { // group name if (!this.groupName.replace(/\s*/g, '')) { this.$message.warning(`${i18n.$t('Please enter group name')}`) return false } if (this.alertInstanceIds) { this.$message.warning(`${i18n.$t('Select Alarm plugin instance')}`) return false } return true }, _submit () { let param = { groupName: this.groupName, alertInstanceIds: this.alertInstanceIds.join(','), description: this.description } if (this.item) { param.id = this.item.id } this.$refs.popover.spinnerLoading = true this.store.dispatch(`security/${this.item ? 'updateAlertgrou' : 'createAlertgrou'}`, param).then(res => { this.$emit('onUpdate') this.$message.success(res.msg) this.$refs.popover.spinnerLoading = false }).catch(e => { this.$message.error(e.msg || '') this.$refs.popover.spinnerLoading = false }) }, close () { this.$emit('close') } }, watch: {}, created () { if (this.item) { this.groupName = this.item.groupName let dataStrArr = this.item.alertInstanceIds.split(',') this.alertInstanceIds = _.map(dataStrArr, v => { return +v }) this.description = this.item.description } }, mounted () { }, components: { mPopover, mListBoxF } } </script>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,726
[Improvement][UI] When we used the UI page, we found some problems such as parameter validation, parameter update shows success but actually work
### 1.When editing an existing data source, the data source type does not actually support switching, preferably not editing ![image](https://user-images.githubusercontent.com/52202080/123957653-a3435c80-d9de-11eb-8097-f14fc890586c.png) ![image](https://user-images.githubusercontent.com/52202080/123956430-46937200-d9dd-11eb-8bec-dc394ad0c654.png) ### 2.When editing an existing data source, the name of the existing data source is changed to show success, but does not actually take effect. ### 3.When a token is created, the expiration time can be set to empty, result in an illegal parameter and it can create success: ![image](https://user-images.githubusercontent.com/52202080/123960033-6af14d80-d9e1-11eb-87d6-1438962a8245.png) ![image](https://user-images.githubusercontent.com/52202080/123960120-82c8d180-d9e1-11eb-8df0-dc3598a6a0aa.png) ### 4.Non-Admin user profile page does not display tenant information ![image](https://user-images.githubusercontent.com/52202080/123957087-f8cb3980-d9dd-11eb-9b8f-ee349ee7eb91.png) ![image](https://user-images.githubusercontent.com/52202080/123957281-2d3ef580-d9de-11eb-9813-ef5c8311994d.png)
https://github.com/apache/dolphinscheduler/issues/5726
https://github.com/apache/dolphinscheduler/pull/5727
00e76558be001dc72cf60f4db93c881ed98db95a
1f0c67bfb772f46a0e7d6289b13a499aae403fe3
"2021-06-30T12:28:11Z"
java
"2021-07-14T05:51:10Z"
dolphinscheduler-ui/src/js/conf/home/pages/user/pages/token/_source/createToken.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <m-popover ref="popover" :ok-text="item ? $t('Edit') : $t('Submit')" @ok="_ok" @close="close"> <template slot="content"> <div class="create-token-model"> <m-list-box-f> <template slot="name"><strong>*</strong>{{$t('Expiration time')}}</template> <template slot="content"> <el-date-picker type="datetime" :picker-options="pickerOptions" v-model="expireTime" @on-change="_onChange" format="yyyy-MM-dd HH:mm:ss" size="small"> </el-date-picker> </template> </m-list-box-f> <m-list-box-f v-if="auth"> <template slot="name"><strong>*</strong>{{$t('User')}}</template> <template slot="content"> <el-select v-model="userId" @change="_onChange" size="small"> <el-option v-for="city in userIdList" :key="city.id" :value="city.id" :label="city.userName"> </el-option> </el-select> </template> </m-list-box-f> <m-list-box-f> <template slot="name">Token</template> <template slot="content"> <el-input readonly style="width: 306px;" type="input" size="small" v-model="token" :placeholder="$t('Please enter token')"> </el-input> <el-button size="small" @click="_generateToken" :loading="tokenLoading">{{$t('Generate token')}}</el-button> </template> </m-list-box-f> </div> </template> </m-popover> </template> <script> 
import _ from 'lodash' import dayjs from 'dayjs' import i18n from '@/module/i18n' import store from '@/conf/home/store' import Permissions from '@/module/permissions' import mPopover from '@/module/components/popup/popover' import mListBoxF from '@/module/components/listBoxF/listBoxF' export default { name: 'create-token', data () { return { store, expireTime: dayjs().format('YYYY-MM-DD 23:59:59'), userId: null, disabledDate: date => (date.getTime() - new Date(new Date().getTime() - 24 * 60 * 60 * 1000)) < 0, token: '', userIdList: [], tokenLoading: false, auth: !Permissions.getAuth(), pickerOptions: { disabledDate (time) { return time.getTime() < Date.now() - 8.64e7 // 当前时间以后可以选择当前时间 } } } }, props: { item: Object }, methods: { _ok () { if (this._verification()) { this._submit() } }, _verification () { if (!this.token) { this.$message.warning(`${i18n.$t('Please generate token')}`) return false } return true }, _submit () { let param = { expireTime: dayjs(this.expireTime).format('YYYY-MM-DD HH:mm:ss'), userId: this.userId, token: this.token } if (this.item) { param.id = this.item.id } this.$refs.popover.spinnerLoading = true this.store.dispatch(`user/${this.item ? 
'updateToken' : 'createToken'}`, param).then(res => { this.$emit('onUpdate') this.$message.success(res.msg) this.$refs.popover.spinnerLoading = false }).catch(e => { this.$message.error(e.msg || '') this.$refs.popover.spinnerLoading = false }) }, _generateToken () { this.tokenLoading = true this.store.dispatch('user/generateToken', { userId: this.userId, expireTime: this.expireTime }).then(res => { setTimeout(() => { this.tokenLoading = false this.token = res }, 1200) }).catch(e => { this.token = '' this.$message.error(e.msg || '') this.tokenLoading = false }) }, _onChange () { this.token = '' }, close () { this.$emit('close') } }, watch: {}, created () { const d = (userId) => { if (this.item) { this.expireTime = this.item.expireTime this.userId = this.item.userId this.token = this.item.token } else { this.userId = userId } } if (this.auth) { this.store.dispatch('security/getUsersAll').then(res => { this.userIdList = _.map(res, v => _.pick(v, ['id', 'userName'])) d(this.userIdList[0].id) }) } else { d(this.store.state.user.userInfo.id) } }, mounted () { }, components: { mPopover, mListBoxF } } </script> <style lang="scss" rel="stylesheet/scss"> .create-token-model { width: 640px; } </style>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,726
[Improvement][UI] When we used the UI page, we found some problems such as parameter validation, parameter update shows success but actually work
### 1.When editing an existing data source, the data source type does not actually support switching, preferably not editing ![image](https://user-images.githubusercontent.com/52202080/123957653-a3435c80-d9de-11eb-8097-f14fc890586c.png) ![image](https://user-images.githubusercontent.com/52202080/123956430-46937200-d9dd-11eb-8bec-dc394ad0c654.png) ### 2.When editing an existing data source, the name of the existing data source is changed to show success, but does not actually take effect. ### 3.When a token is created, the expiration time can be set to empty, result in an illegal parameter and it can create success: ![image](https://user-images.githubusercontent.com/52202080/123960033-6af14d80-d9e1-11eb-87d6-1438962a8245.png) ![image](https://user-images.githubusercontent.com/52202080/123960120-82c8d180-d9e1-11eb-8df0-dc3598a6a0aa.png) ### 4.Non-Admin user profile page does not display tenant information ![image](https://user-images.githubusercontent.com/52202080/123957087-f8cb3980-d9dd-11eb-9b8f-ee349ee7eb91.png) ![image](https://user-images.githubusercontent.com/52202080/123957281-2d3ef580-d9de-11eb-9813-ef5c8311994d.png)
https://github.com/apache/dolphinscheduler/issues/5726
https://github.com/apache/dolphinscheduler/pull/5727
00e76558be001dc72cf60f4db93c881ed98db95a
1f0c67bfb772f46a0e7d6289b13a499aae403fe3
"2021-06-30T12:28:11Z"
java
"2021-07-14T05:51:10Z"
dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ export default { 'User Name': 'User Name', 'Please enter user name': 'Please enter user name', Password: 'Password', 'Please enter your password': 'Please enter your password', 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22', Login: 'Login', Home: 'Home', 'Failed to create node to save': 'Failed to create node to save', 'Global parameters': 'Global parameters', 'Local parameters': 'Local parameters', 'Copy success': 'Copy success', 'The browser does not support automatic copying': 'The browser does not support automatic copying', 'Whether to save the DAG graph': 'Whether to save the DAG graph', 'Current node settings': 'Current node settings', 'View history': 'View history', 'View log': 'View log', 'Force success': 'Force success', 'Enter this child node': 'Enter this child node', 'Node name': 'Node name', 'Please enter name (required)': 'Please enter name (required)', 'Run flag': 'Run flag', Normal: 'Normal', 'Prohibition execution': 'Prohibition execution', 'Please enter description': 'Please enter description', 
'Number of failed retries': 'Number of failed retries', Times: 'Times', 'Failed retry interval': 'Failed retry interval', Minute: 'Minute', 'Delay execution time': 'Delay execution time', 'Delay execution': 'Delay execution', 'Forced success': 'Forced success', Cancel: 'Cancel', 'Confirm add': 'Confirm add', 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process', 'The task has not been executed and cannot enter the sub-Process': 'The task has not been executed and cannot enter the sub-Process', 'Name already exists': 'Name already exists', 'Download Log': 'Download Log', 'Refresh Log': 'Refresh Log', 'Enter full screen': 'Enter full screen', 'Cancel full screen': 'Cancel full screen', Close: 'Close', 'Update log success': 'Update log success', 'No more logs': 'No more logs', 'No log': 'No log', 'Loading Log...': 'Loading Log...', 'Set the DAG diagram name': 'Set the DAG diagram name', 'Please enter description(optional)': 'Please enter description(optional)', 'Set global': 'Set global', 'Whether to go online the process definition': 'Whether to go online the process definition', 'Whether to update the process definition': 'Whether to update the process definition', Add: 'Add', 'DAG graph name cannot be empty': 'DAG graph name cannot be empty', 'Create Datasource': 'Create Datasource', 'Project Home': 'Workflow Monitor', 'Project Manage': 'Project', 'Create Project': 'Create Project', 'Cron Manage': 'Cron Manage', 'Copy Workflow': 'Copy Workflow', 'Tenant Manage': 'Tenant Manage', 'Create Tenant': 'Create Tenant', 'User Manage': 'User Manage', 'Create User': 'Create User', 'User Information': 'User Information', 'Edit Password': 'Edit Password', Success: 'Success', Failed: 'Failed', Delete: 'Delete', 'Please choose': 'Please choose', 'Please enter a positive integer': 'Please enter a positive integer', 'Program Type': 'Program Type', 'Main 
Class': 'Main Class', 'Main Jar Package': 'Main Jar Package', 'Please enter main jar package': 'Please enter main jar package', 'Please enter main class': 'Please enter main class', 'Main Arguments': 'Main Arguments', 'Please enter main arguments': 'Please enter main arguments', 'Option Parameters': 'Option Parameters', 'Please enter option parameters': 'Please enter option parameters', Resources: 'Resources', 'Custom Parameters': 'Custom Parameters', 'Custom template': 'Custom template', Datasource: 'Datasource', methods: 'methods', 'Please enter the procedure method': 'Please enter the procedure script \n\ncall procedure:{call <procedure-name>[(<arg1>,<arg2>, ...)]}\n\ncall function:{?= call <procedure-name>[(<arg1>,<arg2>, ...)]} ', 'The procedure method script example': 'example:{call <procedure-name>[(?,?, ...)]} or {?= call <procedure-name>[(?,?, ...)]}', Script: 'Script', 'Please enter script(required)': 'Please enter script(required)', 'Deploy Mode': 'Deploy Mode', 'Driver Cores': 'Driver Cores', 'Please enter Driver cores': 'Please enter Driver cores', 'Driver Memory': 'Driver Memory', 'Please enter Driver memory': 'Please enter Driver memory', 'Executor Number': 'Executor Number', 'Please enter Executor number': 'Please enter Executor number', 'The Executor number should be a positive integer': 'The Executor number should be a positive integer', 'Executor Memory': 'Executor Memory', 'Please enter Executor memory': 'Please enter Executor memory', 'Executor Cores': 'Executor Cores', 'Please enter Executor cores': 'Please enter Executor cores', 'Memory should be a positive integer': 'Memory should be a positive integer', 'Core number should be positive integer': 'Core number should be positive integer', 'Flink Version': 'Flink Version', 'JobManager Memory': 'JobManager Memory', 'Please enter JobManager memory': 'Please enter JobManager memory', 'TaskManager Memory': 'TaskManager Memory', 'Please enter TaskManager memory': 'Please enter TaskManager memory', 
'Slot Number': 'Slot Number', 'Please enter Slot number': 'Please enter Slot number', Parallelism: 'Parallelism', 'Please enter Parallelism': 'Please enter Parallelism', 'TaskManager Number': 'TaskManager Number', 'Please enter TaskManager number': 'Please enter TaskManager number', 'App Name': 'App Name', 'Please enter app name(optional)': 'Please enter app name(optional)', 'SQL Type': 'SQL Type', 'Send Email': 'Send Email', 'Log display': 'Log display', 'Max Numbers Return': 'Number of records to return', 'Max Numbers Return placeholder': 'Default is 10000, a large value may cause high pressure on the memory', 'Max Numbers Return required': 'Number of records to return parameter must be a number in the range of 0 - 2147483647', 'rows of result': 'rows of result', Title: 'Title', 'Please enter the title of email': 'Please enter the title of email', Table: 'Table', TableMode: 'Table', Attachment: 'Attachment', 'SQL Parameter': 'SQL Parameter', 'SQL Statement': 'SQL Statement', 'UDF Function': 'UDF Function', 'Please enter a SQL Statement(required)': 'Please enter a SQL Statement(required)', 'Please enter a JSON Statement(required)': 'Please enter a JSON Statement(required)', 'One form or attachment must be selected': 'One form or attachment must be selected', 'Mail subject required': 'Mail subject required', 'Child Node': 'Child Node', 'Please select a sub-Process': 'Please select a sub-Process', Edit: 'Edit', 'Switch To This Version': 'Switch To This Version', 'Datasource Name': 'Datasource Name', 'Please enter datasource name': 'Please enter datasource name', IP: 'IP', 'Please enter IP': 'Please enter IP', Port: 'Port', 'Please enter port': 'Please enter port', 'Database Name': 'Database Name', 'Please enter database name': 'Please enter database name', 'Oracle Connect Type': 'ServiceName or SID', 'Oracle Service Name': 'ServiceName', 'Oracle SID': 'SID', 'jdbc connect parameters': 'jdbc connect parameters', 'Test Connect': 'Test Connect', 'Please enter resource 
name': 'Please enter resource name', 'Please enter resource folder name': 'Please enter resource folder name', 'Please enter a non-query SQL statement': 'Please enter a non-query SQL statement', 'Please enter IP/hostname': 'Please enter IP/hostname', 'jdbc connection parameters is not a correct JSON format': 'jdbc connection parameters is not a correct JSON format', '#': '#', 'Datasource Type': 'Datasource Type', 'Datasource Parameter': 'Datasource Parameter', 'Create Time': 'Create Time', 'Update Time': 'Update Time', Operation: 'Operation', 'Current Version': 'Current Version', 'Click to view': 'Click to view', 'Delete?': 'Delete?', 'Switch Version Successfully': 'Switch Version Successfully', 'Confirm Switch To This Version?': 'Confirm Switch To This Version?', Confirm: 'Confirm', 'Task status statistics': 'Task Status Statistics', Number: 'Number', State: 'State', 'Process Status Statistics': 'Process Status Statistics', 'Process Definition Statistics': 'Process Definition Statistics', 'Project Name': 'Project Name', 'Please enter name': 'Please enter name', 'Owned Users': 'Owned Users', 'Process Pid': 'Process Pid', 'Zk registration directory': 'Zk registration directory', cpuUsage: 'cpuUsage', memoryUsage: 'memoryUsage', 'Last heartbeat time': 'Last heartbeat time', 'Edit Tenant': 'Edit Tenant', 'OS Tenant Code': 'OS Tenant Code', 'Tenant Name': 'Tenant Name', Queue: 'Yarn Queue', 'Please select a queue': 'default is tenant association queue', 'Please enter the os tenant code in English': 'Please enter the os tenant code in English', 'Please enter os tenant code in English': 'Please enter os tenant code in English', 'Please enter os tenant code': 'Please enter os tenant code', 'Please enter tenant Name': 'Please enter tenant Name', 'The os tenant code. Only letters or a combination of letters and numbers are allowed': 'The os tenant code. 
Only letters or a combination of letters and numbers are allowed', 'Edit User': 'Edit User', Tenant: 'Tenant', Email: 'Email', Phone: 'Phone', 'User Type': 'User Type', 'Please enter phone number': 'Please enter phone number', 'Please enter email': 'Please enter email', 'Please enter the correct email format': 'Please enter the correct email format', 'Please enter the correct mobile phone format': 'Please enter the correct mobile phone format', Project: 'Project', Authorize: 'Authorize', 'File resources': 'File resources', 'UDF resources': 'UDF resources', 'UDF resources directory': 'UDF resources directory', 'Please select UDF resources directory': 'Please select UDF resources directory', 'Alarm group': 'Alarm group', 'Alarm group required': 'Alarm group required', 'Edit alarm group': 'Edit alarm group', 'Create alarm group': 'Create alarm group', 'Create Alarm Instance': 'Create Alarm Instance', 'Edit Alarm Instance': 'Edit Alarm Instance', 'Group Name': 'Group Name', 'Alarm instance name': 'Alarm instance name', 'Alarm plugin name': 'Alarm plugin name', 'Select plugin': 'Select plugin', 'Select Alarm plugin': 'Please select an Alarm plugin', 'Please enter group name': 'Please enter group name', 'Instance parameter exception': 'Instance parameter exception', 'Group Type': 'Group Type', 'Alarm plugin instance': 'Alarm plugin instance', 'Select Alarm plugin instance': 'Please select an Alarm plugin instance', Remarks: 'Remarks', SMS: 'SMS', 'Managing Users': 'Managing Users', Permission: 'Permission', Administrator: 'Administrator', 'Confirm Password': 'Confirm Password', 'Please enter confirm password': 'Please enter confirm password', 'Password cannot be in Chinese': 'Password cannot be in Chinese', 'Please enter a password (6-22) character password': 'Please enter a password (6-22) character password', 'Confirmation password cannot be in Chinese': 'Confirmation password cannot be in Chinese', 'Please enter a confirmation password (6-22) character password': 
'Please enter a confirmation password (6-22) character password', 'The password is inconsistent with the confirmation password': 'The password is inconsistent with the confirmation password', 'Please select the datasource': 'Please select the datasource', 'Please select resources': 'Please select resources', Query: 'Query', 'Non Query': 'Non Query', 'prop(required)': 'prop(required)', 'value(optional)': 'value(optional)', 'value(required)': 'value(required)', 'prop is empty': 'prop is empty', 'value is empty': 'value is empty', 'prop is repeat': 'prop is repeat', 'Start Time': 'Start Time', 'End Time': 'End Time', crontab: 'crontab', 'Failure Strategy': 'Failure Strategy', online: 'online', offline: 'offline', 'Task Status': 'Task Status', 'Process Instance': 'Process Instance', 'Task Instance': 'Task Instance', 'Select date range': 'Select date range', startDate: 'startDate', endDate: 'endDate', Date: 'Date', Waiting: 'Waiting', Execution: 'Execution', Finish: 'Finish', 'Create File': 'Create File', 'Create folder': 'Create folder', 'File Name': 'File Name', 'Folder Name': 'Folder Name', 'File Format': 'File Format', 'Folder Format': 'Folder Format', 'File Content': 'File Content', 'Upload File Size': 'Upload File size cannot exceed 1g', Create: 'Create', 'Please enter the resource content': 'Please enter the resource content', 'Resource content cannot exceed 3000 lines': 'Resource content cannot exceed 3000 lines', 'File Details': 'File Details', 'Download Details': 'Download Details', Return: 'Return', Save: 'Save', 'File Manage': 'File Manage', 'Upload Files': 'Upload Files', 'Create UDF Function': 'Create UDF Function', 'Upload UDF Resources': 'Upload UDF Resources', 'Service-Master': 'Service-Master', 'Service-Worker': 'Service-Worker', 'Process Name': 'Process Name', Executor: 'Executor', 'Run Type': 'Run Type', 'Scheduling Time': 'Scheduling Time', 'Run Times': 'Run Times', host: 'host', 'fault-tolerant sign': 'fault-tolerant sign', Rerun: 'Rerun', 
'Recovery Failed': 'Recovery Failed', Stop: 'Stop', Pause: 'Pause', 'Recovery Suspend': 'Recovery Suspend', Gantt: 'Gantt', 'Node Type': 'Node Type', 'Submit Time': 'Submit Time', Duration: 'Duration', 'Retry Count': 'Retry Count', 'Task Name': 'Task Name', 'Task Date': 'Task Date', 'Source Table': 'Source Table', 'Record Number': 'Record Number', 'Target Table': 'Target Table', 'Online viewing type is not supported': 'Online viewing type is not supported', Size: 'Size', Rename: 'Rename', Download: 'Download', Export: 'Export', 'Version Info': 'Version Info', Submit: 'Submit', 'Edit UDF Function': 'Edit UDF Function', type: 'type', 'UDF Function Name': 'UDF Function Name', FILE: 'FILE', UDF: 'UDF', 'File Subdirectory': 'File Subdirectory', 'Please enter a function name': 'Please enter a function name', 'Package Name': 'Package Name', 'Please enter a Package name': 'Please enter a Package name', Parameter: 'Parameter', 'Please enter a parameter': 'Please enter a parameter', 'UDF Resources': 'UDF Resources', 'Upload Resources': 'Upload Resources', Instructions: 'Instructions', 'Please enter a instructions': 'Please enter a instructions', 'Please enter a UDF function name': 'Please enter a UDF function name', 'Select UDF Resources': 'Select UDF Resources', 'Class Name': 'Class Name', 'Jar Package': 'Jar Package', 'Library Name': 'Library Name', 'UDF Resource Name': 'UDF Resource Name', 'File Size': 'File Size', Description: 'Description', 'Drag Nodes and Selected Items': 'Drag Nodes and Selected Items', 'Select Line Connection': 'Select Line Connection', 'Delete selected lines or nodes': 'Delete selected lines or nodes', 'Full Screen': 'Full Screen', Unpublished: 'Unpublished', 'Start Process': 'Start Process', 'Execute from the current node': 'Execute from the current node', 'Recover tolerance fault process': 'Recover tolerance fault process', 'Resume the suspension process': 'Resume the suspension process', 'Execute from the failed nodes': 'Execute from the failed 
nodes', 'Complement Data': 'Complement Data', 'Scheduling execution': 'Scheduling execution', 'Recovery waiting thread': 'Recovery waiting thread', 'Submitted successfully': 'Submitted successfully', Executing: 'Executing', 'Ready to pause': 'Ready to pause', 'Ready to stop': 'Ready to stop', 'Need fault tolerance': 'Need fault tolerance', Kill: 'Kill', 'Waiting for thread': 'Waiting for thread', 'Waiting for dependence': 'Waiting for dependence', Start: 'Start', Copy: 'Copy', 'Copy name': 'Copy name', 'Copy path': 'Copy path', 'Please enter keyword': 'Please enter keyword', 'File Upload': 'File Upload', 'Drag the file into the current upload window': 'Drag the file into the current upload window', 'Drag area upload': 'Drag area upload', Upload: 'Upload', 'ReUpload File': 'ReUpload File', 'Please enter file name': 'Please enter file name', 'Please select the file to upload': 'Please select the file to upload', 'Resources manage': 'Resources', Security: 'Security', Logout: 'Logout', 'No data': 'No data', 'Uploading...': 'Uploading...', 'Loading...': 'Loading...', List: 'List', 'Unable to download without proper url': 'Unable to download without proper url', Process: 'Process', 'Process definition': 'Process definition', 'Task record': 'Task record', 'Warning group manage': 'Warning group manage', 'Warning instance manage': 'Warning instance manage', 'Servers manage': 'Servers manage', 'UDF manage': 'UDF manage', 'Resource manage': 'Resource manage', 'Function manage': 'Function manage', 'Edit password': 'Edit password', 'Ordinary users': 'Ordinary users', 'Create process': 'Create process', 'Import process': 'Import process', 'Timing state': 'Timing state', Timing: 'Timing', Timezone: 'Timezone', TreeView: 'TreeView', 'Mailbox already exists! Recipients and copyers cannot repeat': 'Mailbox already exists! 
Recipients and copyers cannot repeat', 'Mailbox input is illegal': 'Mailbox input is illegal', 'Please set the parameters before starting': 'Please set the parameters before starting', Continue: 'Continue', End: 'End', 'Node execution': 'Node execution', 'Backward execution': 'Backward execution', 'Forward execution': 'Forward execution', 'Execute only the current node': 'Execute only the current node', 'Notification strategy': 'Notification strategy', 'Notification group': 'Notification group', 'Please select a notification group': 'Please select a notification group', receivers: 'receivers', receiverCcs: 'receiverCcs', 'Whether it is a complement process?': 'Whether it is a complement process?', 'Schedule date': 'Schedule date', 'Mode of execution': 'Mode of execution', 'Serial execution': 'Serial execution', 'Parallel execution': 'Parallel execution', 'Set parameters before timing': 'Set parameters before timing', 'Start and stop time': 'Start and stop time', 'Please select time': 'Please select time', 'Please enter crontab': 'Please enter crontab', none_1: 'none', success_1: 'success', failure_1: 'failure', All_1: 'All', Toolbar: 'Toolbar', 'View variables': 'View variables', 'Format DAG': 'Format DAG', 'Refresh DAG status': 'Refresh DAG status', Return_1: 'Return', 'Please enter format': 'Please enter format', 'connection parameter': 'connection parameter', 'Process definition details': 'Process definition details', 'Create process definition': 'Create process definition', 'Scheduled task list': 'Scheduled task list', 'Process instance details': 'Process instance details', 'Create Resource': 'Create Resource', 'User Center': 'User Center', AllStatus: 'All', None: 'None', Name: 'Name', 'Process priority': 'Process priority', 'Task priority': 'Task priority', 'Task timeout alarm': 'Task timeout alarm', 'Timeout strategy': 'Timeout strategy', 'Timeout alarm': 'Timeout alarm', 'Timeout failure': 'Timeout failure', 'Timeout period': 'Timeout period', 'Waiting 
Dependent complete': 'Waiting Dependent complete', 'Waiting Dependent start': 'Waiting Dependent start', 'Check interval': 'Check interval', 'Timeout must be longer than check interval': 'Timeout must be longer than check interval', 'Timeout strategy must be selected': 'Timeout strategy must be selected', 'Timeout must be a positive integer': 'Timeout must be a positive integer', 'Add dependency': 'Add dependency', and: 'and', or: 'or', month: 'month', week: 'week', day: 'day', hour: 'hour', Running: 'Running', 'Waiting for dependency to complete': 'Waiting for dependency to complete', Selected: 'Selected', CurrentHour: 'CurrentHour', Last1Hour: 'Last1Hour', Last2Hours: 'Last2Hours', Last3Hours: 'Last3Hours', Last24Hours: 'Last24Hours', today: 'today', Last1Days: 'Last1Days', Last2Days: 'Last2Days', Last3Days: 'Last3Days', Last7Days: 'Last7Days', ThisWeek: 'ThisWeek', LastWeek: 'LastWeek', LastMonday: 'LastMonday', LastTuesday: 'LastTuesday', LastWednesday: 'LastWednesday', LastThursday: 'LastThursday', LastFriday: 'LastFriday', LastSaturday: 'LastSaturday', LastSunday: 'LastSunday', ThisMonth: 'ThisMonth', LastMonth: 'LastMonth', LastMonthBegin: 'LastMonthBegin', LastMonthEnd: 'LastMonthEnd', 'Refresh status succeeded': 'Refresh status succeeded', 'Queue manage': 'Yarn Queue manage', 'Create queue': 'Create queue', 'Edit queue': 'Edit queue', 'Datasource manage': 'Datasource', 'History task record': 'History task record', 'Please go online': 'Please go online', 'Queue value': 'Queue value', 'Please enter queue value': 'Please enter queue value', 'Worker group manage': 'Worker group manage', 'Create worker group': 'Create worker group', 'Edit worker group': 'Edit worker group', 'Token manage': 'Token manage', 'Create token': 'Create token', 'Edit token': 'Edit token', Addresses: 'Addresses', 'Worker Addresses': 'Worker Addresses', 'Please select the worker addresses': 'Please select the worker addresses', 'Failure time': 'Failure time', 'Expiration time': 
'Expiration time', User: 'User', 'Please enter token': 'Please enter token', 'Generate token': 'Generate token', Monitor: 'Monitor', Group: 'Group', 'Queue statistics': 'Queue statistics', 'Command status statistics': 'Command status statistics', 'Task kill': 'Task Kill', 'Task queue': 'Task queue', 'Error command count': 'Error command count', 'Normal command count': 'Normal command count', Manage: ' Manage', 'Number of connections': 'Number of connections', Sent: 'Sent', Received: 'Received', 'Min latency': 'Min latency', 'Avg latency': 'Avg latency', 'Max latency': 'Max latency', 'Node count': 'Node count', 'Query time': 'Query time', 'Node self-test status': 'Node self-test status', 'Health status': 'Health status', 'Max connections': 'Max connections', 'Threads connections': 'Threads connections', 'Max used connections': 'Max used connections', 'Threads running connections': 'Threads running connections', 'Worker group': 'Worker group', 'Please enter a positive integer greater than 0': 'Please enter a positive integer greater than 0', 'Pre Statement': 'Pre Statement', 'Post Statement': 'Post Statement', 'Statement cannot be empty': 'Statement cannot be empty', 'Process Define Count': 'Work flow Define Count', 'Process Instance Running Count': 'Process Instance Running Count', 'command number of waiting for running': 'command number of waiting for running', 'failure command number': 'failure command number', 'tasks number of waiting running': 'tasks number of waiting running', 'task number of ready to kill': 'task number of ready to kill', 'Statistics manage': 'Statistics Manage', statistics: 'Statistics', 'select tenant': 'select tenant', 'Please enter Principal': 'Please enter Principal', 'Please enter the kerberos authentication parameter java.security.krb5.conf': 'Please enter the kerberos authentication parameter java.security.krb5.conf', 'Please enter the kerberos authentication parameter login.user.keytab.username': 'Please enter the kerberos 
authentication parameter login.user.keytab.username', 'Please enter the kerberos authentication parameter login.user.keytab.path': 'Please enter the kerberos authentication parameter login.user.keytab.path', 'The start time must not be the same as the end': 'The start time must not be the same as the end', 'Startup parameter': 'Startup parameter', 'Startup type': 'Startup type', 'warning of timeout': 'warning of timeout', 'Next five execution times': 'Next five execution times', 'Execute time': 'Execute time', 'Complement range': 'Complement range', 'Http Url': 'Http Url', 'Http Method': 'Http Method', 'Http Parameters': 'Http Parameters', 'Http Parameters Key': 'Http Parameters Key', 'Http Parameters Position': 'Http Parameters Position', 'Http Parameters Value': 'Http Parameters Value', 'Http Check Condition': 'Http Check Condition', 'Http Condition': 'Http Condition', 'Please Enter Http Url': 'Please Enter Http Url(required)', 'Please Enter Http Condition': 'Please Enter Http Condition', 'There is no data for this period of time': 'There is no data for this period of time', 'Worker addresses cannot be empty': 'Worker addresses cannot be empty', 'Please generate token': 'Please generate token', 'Spark Version': 'Spark Version', TargetDataBase: 'target database', TargetTable: 'target table', 'Please enter the table of target': 'Please enter the table of target', 'Please enter a Target Table(required)': 'Please enter a Target Table(required)', SpeedByte: 'speed(byte count)', SpeedRecord: 'speed(record count)', '0 means unlimited by byte': '0 means unlimited', '0 means unlimited by count': '0 means unlimited', 'Modify User': 'Modify User', 'Whether directory': 'Whether directory', Yes: 'Yes', No: 'No', 'Hadoop Custom Params': 'Hadoop Params', 'Sqoop Advanced Parameters': 'Sqoop Params', 'Sqoop Job Name': 'Job Name', 'Please enter Mysql Database(required)': 'Please enter Mysql Database(required)', 'Please enter Mysql Table(required)': 'Please enter Mysql 
Table(required)', 'Please enter Columns (Comma separated)': 'Please enter Columns (Comma separated)', 'Please enter Target Dir(required)': 'Please enter Target Dir(required)', 'Please enter Export Dir(required)': 'Please enter Export Dir(required)', 'Please enter Hive Database(required)': 'Please enter Hive Databasec(required)', 'Please enter Hive Table(required)': 'Please enter Hive Table(required)', 'Please enter Hive Partition Keys': 'Please enter Hive Partition Key', 'Please enter Hive Partition Values': 'Please enter Partition Value', 'Please enter Replace Delimiter': 'Please enter Replace Delimiter', 'Please enter Fields Terminated': 'Please enter Fields Terminated', 'Please enter Lines Terminated': 'Please enter Lines Terminated', 'Please enter Concurrency': 'Please enter Concurrency', 'Please enter Update Key': 'Please enter Update Key', 'Please enter Job Name(required)': 'Please enter Job Name(required)', 'Please enter Custom Shell(required)': 'Please enter Custom Shell(required)', Direct: 'Direct', Type: 'Type', ModelType: 'ModelType', ColumnType: 'ColumnType', Database: 'Database', Column: 'Column', 'Map Column Hive': 'Map Column Hive', 'Map Column Java': 'Map Column Java', 'Export Dir': 'Export Dir', 'Hive partition Keys': 'Hive partition Keys', 'Hive partition Values': 'Hive partition Values', FieldsTerminated: 'FieldsTerminated', LinesTerminated: 'LinesTerminated', IsUpdate: 'IsUpdate', UpdateKey: 'UpdateKey', UpdateMode: 'UpdateMode', 'Target Dir': 'Target Dir', DeleteTargetDir: 'DeleteTargetDir', FileType: 'FileType', CompressionCodec: 'CompressionCodec', CreateHiveTable: 'CreateHiveTable', DropDelimiter: 'DropDelimiter', OverWriteSrc: 'OverWriteSrc', ReplaceDelimiter: 'ReplaceDelimiter', Concurrency: 'Concurrency', Form: 'Form', OnlyUpdate: 'OnlyUpdate', AllowInsert: 'AllowInsert', 'Data Source': 'Data Source', 'Data Target': 'Data Target', 'All Columns': 'All Columns', 'Some Columns': 'Some Columns', 'Branch flow': 'Branch flow', 'Custom Job': 
'Custom Job', 'Custom Script': 'Custom Script', 'Cannot select the same node for successful branch flow and failed branch flow': 'Cannot select the same node for successful branch flow and failed branch flow', 'Successful branch flow and failed branch flow are required': 'conditions node Successful and failed branch flow are required', 'No resources exist': 'No resources exist', 'Please delete all non-existing resources': 'Please delete all non-existing resources', 'Unauthorized or deleted resources': 'Unauthorized or deleted resources', 'Please delete all non-existent resources': 'Please delete all non-existent resources', Kinship: 'Workflow relationship', Reset: 'Reset', KinshipStateActive: 'Active', KinshipState1: 'Online', KinshipState0: 'Workflow is not online', KinshipState10: 'Scheduling is not online', 'Dag label display control': 'Dag label display control', Enable: 'Enable', Disable: 'Disable', 'The Worker group no longer exists, please select the correct Worker group!': 'The Worker group no longer exists, please select the correct Worker group!', 'Please confirm whether the workflow has been saved before downloading': 'Please confirm whether the workflow has been saved before downloading', 'User name length is between 3 and 39': 'User name length is between 3 and 39', 'Timeout Settings': 'Timeout Settings', 'Connect Timeout': 'Connect Timeout', 'Socket Timeout': 'Socket Timeout', 'Connect timeout be a positive integer': 'Connect timeout be a positive integer', 'Socket Timeout be a positive integer': 'Socket Timeout be a positive integer', ms: 'ms', 'Please Enter Url': 'Please Enter Url eg. 
127.0.0.1:7077', Master: 'Master', 'Please select the waterdrop resources': 'Please select the waterdrop resources', zkDirectory: 'zkDirectory', 'Directory detail': 'Directory detail', 'Connection name': 'Connection name', 'Current connection settings': 'Current connection settings', 'Please save the DAG before formatting': 'Please save the DAG before formatting', 'Batch copy': 'Batch copy', 'Related items': 'Related items', 'Project name is required': 'Project name is required', 'Batch move': 'Batch move', Version: 'Version', 'Pre tasks': 'Pre tasks', 'Running Memory': 'Running Memory', 'Max Memory': 'Max Memory', 'Min Memory': 'Min Memory', 'The workflow canvas is abnormal and cannot be saved, please recreate': 'The workflow canvas is abnormal and cannot be saved, please recreate', Info: 'Info', 'Datasource userName': 'owner', 'Resource userName': 'owner' }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,726
[Improvement][UI] When we used the UI page, we found some problems such as parameter validation, parameter update shows success but actually work
### 1.When editing an existing data source, the data source type does not actually support switching, preferably not editing ![image](https://user-images.githubusercontent.com/52202080/123957653-a3435c80-d9de-11eb-8097-f14fc890586c.png) ![image](https://user-images.githubusercontent.com/52202080/123956430-46937200-d9dd-11eb-8bec-dc394ad0c654.png) ### 2.When editing an existing data source, the name of the existing data source is changed to show success, but does not actually take effect. ### 3.When a token is created, the expiration time can be set to empty, result in an illegal parameter and it can create success: ![image](https://user-images.githubusercontent.com/52202080/123960033-6af14d80-d9e1-11eb-87d6-1438962a8245.png) ![image](https://user-images.githubusercontent.com/52202080/123960120-82c8d180-d9e1-11eb-8df0-dc3598a6a0aa.png) ### 4.Non-Admin user profile page does not display tenant information ![image](https://user-images.githubusercontent.com/52202080/123957087-f8cb3980-d9dd-11eb-9b8f-ee349ee7eb91.png) ![image](https://user-images.githubusercontent.com/52202080/123957281-2d3ef580-d9de-11eb-9813-ef5c8311994d.png)
https://github.com/apache/dolphinscheduler/issues/5726
https://github.com/apache/dolphinscheduler/pull/5727
00e76558be001dc72cf60f4db93c881ed98db95a
1f0c67bfb772f46a0e7d6289b13a499aae403fe3
"2021-06-30T12:28:11Z"
java
"2021-07-14T05:51:10Z"
dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ export default { 'User Name': '用户名', 'Please enter user name': '请输入用户名', Password: '密码', 'Please enter your password': '请输入密码', 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': '密码至少包含数字,字母和字符的两种组合,长度在6-22之间', Login: '登录', Home: '首页', 'Failed to create node to save': '未创建节点保存失败', 'Global parameters': '全局参数', 'Local parameters': '局部参数', 'Copy success': '复制成功', 'The browser does not support automatic copying': '该浏览器不支持自动复制', 'Whether to save the DAG graph': '是否保存DAG图', 'Current node settings': '当前节点设置', 'View history': '查看历史', 'View log': '查看日志', 'Force success': '强制成功', 'Enter this child node': '进入该子节点', 'Node name': '节点名称', 'Please enter name (required)': '请输入名称(必填)', 'Run flag': '运行标志', Normal: '正常', 'Prohibition execution': '禁止执行', 'Please enter description': '请输入描述', 'Number of failed retries': '失败重试次数', Times: '次', 'Failed retry interval': '失败重试间隔', Minute: '分', 'Delay execution time': '延时执行时间', 'Delay execution': '延时执行', 'Forced success': '强制成功', Cancel: '取消', 'Confirm add': '确认添加', 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': '新创建子工作流还未执行,不能进入子工作流', 'The task has not been 
executed and cannot enter the sub-Process': '该任务还未执行,不能进入子工作流', 'Name already exists': '名称已存在请重新输入', 'Download Log': '下载日志', 'Refresh Log': '刷新日志', 'Enter full screen': '进入全屏', 'Cancel full screen': '取消全屏', Close: '关闭', 'Update log success': '更新日志成功', 'No more logs': '暂无更多日志', 'No log': '暂无日志', 'Loading Log...': '正在努力请求日志中...', 'Set the DAG diagram name': '设置DAG图名称', 'Please enter description(optional)': '请输入描述(选填)', 'Set global': '设置全局', 'Whether to go online the process definition': '是否上线流程定义', 'Whether to update the process definition': '是否更新流程定义', Add: '添加', 'DAG graph name cannot be empty': 'DAG图名称不能为空', 'Create Datasource': '创建数据源', 'Project Home': '工作流监控', 'Project Manage': '项目管理', 'Create Project': '创建项目', 'Cron Manage': '定时管理', 'Copy Workflow': '复制工作流', 'Tenant Manage': '租户管理', 'Create Tenant': '创建租户', 'User Manage': '用户管理', 'Create User': '创建用户', 'User Information': '用户信息', 'Edit Password': '密码修改', Success: '成功', Failed: '失败', Delete: '删除', 'Please choose': '请选择', 'Please enter a positive integer': '请输入正整数', 'Program Type': '程序类型', 'Main Class': '主函数的Class', 'Main Jar Package': '主Jar包', 'Please enter main jar package': '请选择主Jar包', 'Please enter main class': '请填写主函数的Class', 'Main Arguments': '主程序参数', 'Please enter main arguments': '请输入主程序参数', 'Option Parameters': '选项参数', 'Please enter option parameters': '请输入选项参数', Resources: '资源', 'Custom Parameters': '自定义参数', 'Custom template': '自定义模版', Datasource: '数据源', methods: '方法', 'Please enter the procedure method': '请输入存储脚本 \n\n调用存储过程:{call <procedure-name>[(<arg1>,<arg2>, ...)]}\n\n调用存储函数:{?= call <procedure-name>[(<arg1>,<arg2>, ...)]} ', 'The procedure method script example': '示例:{call <procedure-name>[(?,?, ...)]} 或 {?= call <procedure-name>[(?,?, ...)]}', Script: '脚本', 'Please enter script(required)': '请输入脚本(必填)', 'Deploy Mode': '部署方式', 'Driver Cores': 'Driver核心数', 'Please enter Driver cores': '请输入Driver核心数', 'Driver Memory': 'Driver内存数', 'Please enter Driver memory': '请输入Driver内存数', 'Executor Number': 
'Executor数量', 'Please enter Executor number': '请输入Executor数量', 'The Executor number should be a positive integer': 'Executor数量为正整数', 'Executor Memory': 'Executor内存数', 'Please enter Executor memory': '请输入Executor内存数', 'Executor Cores': 'Executor核心数', 'Please enter Executor cores': '请输入Executor核心数', 'Memory should be a positive integer': '内存数为数字', 'Core number should be positive integer': '核心数为正整数', 'Flink Version': 'Flink版本', 'JobManager Memory': 'JobManager内存数', 'Please enter JobManager memory': '请输入JobManager内存数', 'TaskManager Memory': 'TaskManager内存数', 'Please enter TaskManager memory': '请输入TaskManager内存数', 'Slot Number': 'Slot数量', 'Please enter Slot number': '请输入Slot数量', Parallelism: '并行度', 'Please enter Parallelism': '请输入并行度', 'TaskManager Number': 'TaskManager数量', 'Please enter TaskManager number': '请输入TaskManager数量', 'App Name': '任务名称', 'Please enter app name(optional)': '请输入任务名称(选填)', 'SQL Type': 'sql类型', 'Send Email': '发送邮件', 'Log display': '日志显示', 'rows of result': '行查询结果', 'Max Numbers Return': '返回的记录行数', 'Max Numbers Return placeholder': '默认值10000,如果值过大可能会对内存造成较大压力', 'Max Numbers Return required': '返回的记录行数值必须是一个在0-2147483647范围内的整数', Title: '主题', 'Please enter the title of email': '请输入邮件主题', Table: '表名', TableMode: '表格', Attachment: '附件', 'SQL Parameter': 'sql参数', 'SQL Statement': 'sql语句', 'UDF Function': 'UDF函数', 'Please enter a SQL Statement(required)': '请输入sql语句(必填)', 'Please enter a JSON Statement(required)': '请输入json语句(必填)', 'One form or attachment must be selected': '表格、附件必须勾选一个', 'Mail subject required': '邮件主题必填', 'Child Node': '子节点', 'Please select a sub-Process': '请选择子工作流', Edit: '编辑', 'Switch To This Version': '切换到该版本', 'Datasource Name': '数据源名称', 'Please enter datasource name': '请输入数据源名称', IP: 'IP主机名', 'Please enter IP': '请输入IP主机名', Port: '端口', 'Please enter port': '请输入端口', 'Database Name': '数据库名', 'Please enter database name': '请输入数据库名', 'Oracle Connect Type': '服务名或SID', 'Oracle Service Name': '服务名', 'Oracle SID': 'SID', 'jdbc connect 
parameters': 'jdbc连接参数', 'Test Connect': '测试连接', 'Please enter resource name': '请输入数据源名称', 'Please enter resource folder name': '请输入资源文件夹名称', 'Please enter a non-query SQL statement': '请输入非查询sql语句', 'Please enter IP/hostname': '请输入IP/主机名', 'jdbc connection parameters is not a correct JSON format': 'jdbc连接参数不是一个正确的JSON格式', '#': '编号', 'Datasource Type': '数据源类型', 'Datasource Parameter': '数据源参数', 'Create Time': '创建时间', 'Update Time': '更新时间', Operation: '操作', 'Current Version': '当前版本', 'Click to view': '点击查看', 'Delete?': '确定删除吗?', 'Switch Version Successfully': '切换版本成功', 'Confirm Switch To This Version?': '确定切换到该版本吗?', Confirm: '确定', 'Task status statistics': '任务状态统计', Number: '数量', State: '状态', 'Process Status Statistics': '流程状态统计', 'Process Definition Statistics': '流程定义统计', 'Project Name': '项目名称', 'Please enter name': '请输入名称', 'Owned Users': '所属用户', 'Process Pid': '进程Pid', 'Zk registration directory': 'zk注册目录', cpuUsage: 'cpuUsage', memoryUsage: 'memoryUsage', 'Last heartbeat time': '最后心跳时间', 'Edit Tenant': '编辑租户', 'OS Tenant Code': '操作系统租户', 'Tenant Name': '租户名称', Queue: '队列', 'Please select a queue': '默认为租户关联队列', 'Please enter the os tenant code in English': '请输入操作系统租户只允许英文', 'Please enter os tenant code in English': '请输入英文操作系统租户', 'Please enter os tenant code': '请输入操作系统租户', 'Please enter tenant Name': '请输入租户名称', 'The os tenant code. 
Only letters or a combination of letters and numbers are allowed': '操作系统租户只允许字母或字母与数字组合', 'Edit User': '编辑用户', Tenant: '租户', Email: '邮件', Phone: '手机', 'User Type': '用户类型', 'Please enter phone number': '请输入手机', 'Please enter email': '请输入邮箱', 'Please enter the correct email format': '请输入正确的邮箱格式', 'Please enter the correct mobile phone format': '请输入正确的手机格式', Project: '项目', Authorize: '授权', 'File resources': '文件资源', 'UDF resources': 'UDF资源', 'UDF resources directory': 'UDF资源目录', 'Please select UDF resources directory': '请选择UDF资源目录', 'Alarm group': '告警组', 'Alarm group required': '告警组必填', 'Edit alarm group': '编辑告警组', 'Create alarm group': '创建告警组', 'Create Alarm Instance': '创建告警实例', 'Edit Alarm Instance': '编辑告警实例', 'Group Name': '组名称', 'Alarm instance name': '告警实例名称', 'Alarm plugin name': '告警插件名称', 'Select plugin': '选择插件', 'Select Alarm plugin': '请选择告警插件', 'Please enter group name': '请输入组名称', 'Instance parameter exception': '实例参数异常', 'Group Type': '组类型', 'Alarm plugin instance': '告警插件实例', 'Select Alarm plugin instance': '请选择告警插件实例', Remarks: '备注', SMS: '短信', 'Managing Users': '管理用户', Permission: '权限', Administrator: '管理员', 'Confirm Password': '确认密码', 'Please enter confirm password': '请输入确认密码', 'Password cannot be in Chinese': '密码不能为中文', 'Please enter a password (6-22) character password': '请输入密码(6-22)字符密码', 'Confirmation password cannot be in Chinese': '确认密码不能为中文', 'Please enter a confirmation password (6-22) character password': '请输入确认密码(6-22)字符密码', 'The password is inconsistent with the confirmation password': '密码与确认密码不一致,请重新确认', 'Please select the datasource': '请选择数据源', 'Please select resources': '请选择资源', Query: '查询', 'Non Query': '非查询', 'prop(required)': 'prop(必填)', 'value(optional)': 'value(选填)', 'value(required)': 'value(必填)', 'prop is empty': 'prop不能为空', 'value is empty': 'value不能为空', 'prop is repeat': 'prop中有重复', 'Start Time': '开始时间', 'End Time': '结束时间', crontab: 'crontab', 'Failure Strategy': '失败策略', online: '上线', offline: '下线', 'Task Status': '任务状态', 'Process 
Instance': '工作流实例', 'Task Instance': '任务实例', 'Select date range': '选择日期区间', startDate: '开始日期', endDate: '结束日期', Date: '日期', Waiting: '等待', Execution: '执行中', Finish: '完成', 'Create File': '创建文件', 'Create folder': '创建文件夹', 'File Name': '文件名称', 'Folder Name': '文件夹名称', 'File Format': '文件格式', 'Folder Format': '文件夹格式', 'File Content': '文件内容', 'Upload File Size': '文件大小不能超过1G', Create: '创建', 'Please enter the resource content': '请输入资源内容', 'Resource content cannot exceed 3000 lines': '资源内容不能超过3000行', 'File Details': '文件详情', 'Download Details': '下载详情', Return: '返回', Save: '保存', 'File Manage': '文件管理', 'Upload Files': '上传文件', 'Create UDF Function': '创建UDF函数', 'Upload UDF Resources': '上传UDF资源', 'Service-Master': '服务管理-Master', 'Service-Worker': '服务管理-Worker', 'Process Name': '工作流名称', Executor: '执行用户', 'Run Type': '运行类型', 'Scheduling Time': '调度时间', 'Run Times': '运行次数', host: 'host', 'fault-tolerant sign': '容错标识', Rerun: '重跑', 'Recovery Failed': '恢复失败', Stop: '停止', Pause: '暂停', 'Recovery Suspend': '恢复运行', Gantt: '甘特图', 'Node Type': '节点类型', 'Submit Time': '提交时间', Duration: '运行时长', 'Retry Count': '重试次数', 'Task Name': '任务名称', 'Task Date': '任务日期', 'Source Table': '源表', 'Record Number': '记录数', 'Target Table': '目标表', 'Online viewing type is not supported': '不支持在线查看类型', Size: '大小', Rename: '重命名', Download: '下载', Export: '导出', 'Version Info': '版本信息', Submit: '提交', 'Edit UDF Function': '编辑UDF函数', type: '类型', 'UDF Function Name': 'UDF函数名称', FILE: '文件', UDF: 'UDF', 'File Subdirectory': '文件子目录', 'Please enter a function name': '请输入函数名', 'Package Name': '包名类名', 'Please enter a Package name': '请输入包名类名', Parameter: '参数', 'Please enter a parameter': '请输入参数', 'UDF Resources': 'UDF资源', 'Upload Resources': '上传资源', Instructions: '使用说明', 'Please enter a instructions': '请输入使用说明', 'Please enter a UDF function name': '请输入UDF函数名称', 'Select UDF Resources': '请选择UDF资源', 'Class Name': '类名', 'Jar Package': 'jar包', 'Library Name': '库名', 'UDF Resource Name': 'UDF资源名称', 'File Size': '文件大小', Description: '描述', 
'Drag Nodes and Selected Items': '拖动节点和选中项', 'Select Line Connection': '选择线条连接', 'Delete selected lines or nodes': '删除选中的线或节点', 'Full Screen': '全屏', Unpublished: '未发布', 'Start Process': '启动工作流', 'Execute from the current node': '从当前节点开始执行', 'Recover tolerance fault process': '恢复被容错的工作流', 'Resume the suspension process': '恢复运行流程', 'Execute from the failed nodes': '从失败节点开始执行', 'Complement Data': '补数', 'Scheduling execution': '调度执行', 'Recovery waiting thread': '恢复等待线程', 'Submitted successfully': '提交成功', Executing: '正在执行', 'Ready to pause': '准备暂停', 'Ready to stop': '准备停止', 'Need fault tolerance': '需要容错', Kill: 'Kill', 'Waiting for thread': '等待线程', 'Waiting for dependence': '等待依赖', Start: '运行', Copy: '复制节点', 'Copy name': '复制名称', 'Copy path': '复制路径', 'Please enter keyword': '请输入关键词', 'File Upload': '文件上传', 'Drag the file into the current upload window': '请将文件拖拽到当前上传窗口内!', 'Drag area upload': '拖动区域上传', Upload: '上传', 'ReUpload File': '重新上传文件', 'Please enter file name': '请输入文件名', 'Please select the file to upload': '请选择要上传的文件', 'Resources manage': '资源中心', Security: '安全中心', Logout: '退出', 'No data': '查询无数据', 'Uploading...': '文件上传中', 'Loading...': '正在努力加载中...', List: '列表', 'Unable to download without proper url': '无下载url无法下载', Process: '工作流', 'Process definition': '工作流定义', 'Task record': '任务记录', 'Warning group manage': '告警组管理', 'Warning instance manage': '告警实例管理', 'Servers manage': '服务管理', 'UDF manage': 'UDF管理', 'Resource manage': '资源管理', 'Function manage': '函数管理', 'Edit password': '修改密码', 'Ordinary users': '普通用户', 'Create process': '创建工作流', 'Import process': '导入工作流', 'Timing state': '定时状态', Timing: '定时', Timezone: '时区', TreeView: '树形图', 'Mailbox already exists! 
Recipients and copyers cannot repeat': '邮箱已存在!收件人和抄送人不能重复', 'Mailbox input is illegal': '邮箱输入不合法', 'Please set the parameters before starting': '启动前请先设置参数', Continue: '继续', End: '结束', 'Node execution': '节点执行', 'Backward execution': '向后执行', 'Forward execution': '向前执行', 'Execute only the current node': '仅执行当前节点', 'Notification strategy': '通知策略', 'Notification group': '通知组', 'Please select a notification group': '请选择通知组', receivers: '收件人', receiverCcs: '抄送人', 'Whether it is a complement process?': '是否补数', 'Schedule date': '调度日期', 'Mode of execution': '执行方式', 'Serial execution': '串行执行', 'Parallel execution': '并行执行', 'Set parameters before timing': '定时前请先设置参数', 'Start and stop time': '起止时间', 'Please select time': '请选择时间', 'Please enter crontab': '请输入crontab', none_1: '都不发', success_1: '成功发', failure_1: '失败发', All_1: '成功或失败都发', Toolbar: '工具栏', 'View variables': '查看变量', 'Format DAG': '格式化DAG', 'Refresh DAG status': '刷新DAG状态', Return_1: '返回上一节点', 'Please enter format': '请输入格式为', 'connection parameter': '连接参数', 'Process definition details': '流程定义详情', 'Create process definition': '创建流程定义', 'Scheduled task list': '定时任务列表', 'Process instance details': '流程实例详情', 'Create Resource': '创建资源', 'User Center': '用户中心', AllStatus: '全部状态', None: '无', Name: '名称', 'Process priority': '流程优先级', 'Task priority': '任务优先级', 'Task timeout alarm': '任务超时告警', 'Timeout strategy': '超时策略', 'Timeout alarm': '超时告警', 'Timeout failure': '超时失败', 'Timeout period': '超时时长', 'Waiting Dependent complete': '等待依赖完成', 'Waiting Dependent start': '等待依赖启动', 'Check interval': '检查间隔', 'Timeout must be longer than check interval': '超时时间必须比检查间隔长', 'Timeout strategy must be selected': '超时策略必须选一个', 'Timeout must be a positive integer': '超时时长必须为正整数', 'Add dependency': '添加依赖', and: '且', or: '或', month: '月', week: '周', day: '日', hour: '时', Running: '正在运行', 'Waiting for dependency to complete': '等待依赖完成', Selected: '已选', CurrentHour: '当前小时', Last1Hour: '前1小时', Last2Hours: '前2小时', Last3Hours: '前3小时', Last24Hours: '前24小时', today: 
'今天', Last1Days: '昨天', Last2Days: '前两天', Last3Days: '前三天', Last7Days: '前七天', ThisWeek: '本周', LastWeek: '上周', LastMonday: '上周一', LastTuesday: '上周二', LastWednesday: '上周三', LastThursday: '上周四', LastFriday: '上周五', LastSaturday: '上周六', LastSunday: '上周日', ThisMonth: '本月', LastMonth: '上月', LastMonthBegin: '上月初', LastMonthEnd: '上月末', 'Refresh status succeeded': '刷新状态成功', 'Queue manage': 'Yarn 队列管理', 'Create queue': '创建队列', 'Edit queue': '编辑队列', 'Datasource manage': '数据源中心', 'History task record': '历史任务记录', 'Please go online': '不要忘记上线', 'Queue value': '队列值', 'Please enter queue value': '请输入队列值', 'Worker group manage': 'Worker分组管理', 'Create worker group': '创建Worker分组', 'Edit worker group': '编辑Worker分组', 'Token manage': '令牌管理', 'Create token': '创建令牌', 'Edit token': '编辑令牌', Addresses: '地址', 'Worker Addresses': 'Worker地址', 'Please select the worker addresses': '请选择Worker地址', 'Failure time': '失效时间', 'Expiration time': '失效时间', User: '用户', 'Please enter token': '请输入令牌', 'Generate token': '生成令牌', Monitor: '监控中心', Group: '分组', 'Queue statistics': '队列统计', 'Command status statistics': '命令状态统计', 'Task kill': '等待kill任务', 'Task queue': '等待执行任务', 'Error command count': '错误指令数', 'Normal command count': '正确指令数', Manage: '管理', 'Number of connections': '连接数', Sent: '发送量', Received: '接收量', 'Min latency': '最低延时', 'Avg latency': '平均延时', 'Max latency': '最大延时', 'Node count': '节点数', 'Query time': '当前查询时间', 'Node self-test status': '节点自检状态', 'Health status': '健康状态', 'Max connections': '最大连接数', 'Threads connections': '当前连接数', 'Max used connections': '同时使用连接最大数', 'Threads running connections': '数据库当前活跃连接数', 'Worker group': 'Worker分组', 'Please enter a positive integer greater than 0': '请输入大于 0 的正整数', 'Pre Statement': '前置sql', 'Post Statement': '后置sql', 'Statement cannot be empty': '语句不能为空', 'Process Define Count': '工作流定义数', 'Process Instance Running Count': '正在运行的流程数', 'command number of waiting for running': '待执行的命令数', 'failure command number': '执行失败的命令数', 'tasks number of waiting running': '待运行任务数', 
'task number of ready to kill': '待杀死任务数', 'Statistics manage': '统计管理', statistics: '统计', 'select tenant': '选择租户', 'Please enter Principal': '请输入Principal', 'Please enter the kerberos authentication parameter java.security.krb5.conf': '请输入kerberos认证参数 java.security.krb5.conf', 'Please enter the kerberos authentication parameter login.user.keytab.username': '请输入kerberos认证参数 login.user.keytab.username', 'Please enter the kerberos authentication parameter login.user.keytab.path': '请输入kerberos认证参数 login.user.keytab.path', 'The start time must not be the same as the end': '开始时间和结束时间不能相同', 'Startup parameter': '启动参数', 'Startup type': '启动类型', 'warning of timeout': '超时告警', 'Next five execution times': '接下来五次执行时间', 'Execute time': '执行时间', 'Complement range': '补数范围', 'Http Url': '请求地址', 'Http Method': '请求类型', 'Http Parameters': '请求参数', 'Http Parameters Key': '参数名', 'Http Parameters Position': '参数位置', 'Http Parameters Value': '参数值', 'Http Check Condition': '校验条件', 'Http Condition': '校验内容', 'Please Enter Http Url': '请填写请求地址(必填)', 'Please Enter Http Condition': '请填写校验内容', 'There is no data for this period of time': '该时间段无数据', 'Worker addresses cannot be empty': 'Worker地址不能为空', 'Please generate token': '请生成Token', 'Spark Version': 'Spark版本', TargetDataBase: '目标库', TargetTable: '目标表', 'Please enter the table of target': '请输入目标表名', 'Please enter a Target Table(required)': '请输入目标表(必填)', SpeedByte: '限流(字节数)', SpeedRecord: '限流(记录数)', '0 means unlimited by byte': 'KB,0代表不限制', '0 means unlimited by count': '0代表不限制', 'Modify User': '修改用户', 'Whether directory': '是否文件夹', Yes: '是', No: '否', 'Hadoop Custom Params': 'Hadoop参数', 'Sqoop Advanced Parameters': 'Sqoop参数', 'Sqoop Job Name': '任务名称', 'Please enter Mysql Database(required)': '请输入Mysql数据库(必填)', 'Please enter Mysql Table(required)': '请输入Mysql表名(必填)', 'Please enter Columns (Comma separated)': '请输入列名,用 , 隔开', 'Please enter Target Dir(required)': '请输入目标路径(必填)', 'Please enter Export Dir(required)': '请输入数据源路径(必填)', 'Please enter Hive 
Database(required)': '请输入Hive数据库(必填)', 'Please enter Hive Table(required)': '请输入Hive表名(必填)', 'Please enter Hive Partition Keys': '请输入分区键', 'Please enter Hive Partition Values': '请输入分区值', 'Please enter Replace Delimiter': '请输入替换分隔符', 'Please enter Fields Terminated': '请输入列分隔符', 'Please enter Lines Terminated': '请输入行分隔符', 'Please enter Concurrency': '请输入并发度', 'Please enter Update Key': '请输入更新列', 'Please enter Job Name(required)': '请输入任务名称(必填)', 'Please enter Custom Shell(required)': '请输入自定义脚本', Direct: '流向', Type: '类型', ModelType: '模式', ColumnType: '列类型', Database: '数据库', Column: '列', 'Map Column Hive': 'Hive类型映射', 'Map Column Java': 'Java类型映射', 'Export Dir': '数据源路径', 'Hive partition Keys': 'Hive 分区键', 'Hive partition Values': 'Hive 分区值', FieldsTerminated: '列分隔符', LinesTerminated: '行分隔符', IsUpdate: '是否更新', UpdateKey: '更新列', UpdateMode: '更新类型', 'Target Dir': '目标路径', DeleteTargetDir: '是否删除目录', FileType: '保存格式', CompressionCodec: '压缩类型', CreateHiveTable: '是否创建新表', DropDelimiter: '是否删除分隔符', OverWriteSrc: '是否覆盖数据源', ReplaceDelimiter: '替换分隔符', Concurrency: '并发度', Form: '表单', OnlyUpdate: '只更新', AllowInsert: '无更新便插入', 'Data Source': '数据来源', 'Data Target': '数据目的', 'All Columns': '全表导入', 'Some Columns': '选择列', 'Branch flow': '分支流转', 'Custom Job': '自定义任务', 'Custom Script': '自定义脚本', 'Cannot select the same node for successful branch flow and failed branch flow': '成功分支流转和失败分支流转不能选择同一个节点', 'Successful branch flow and failed branch flow are required': 'conditions节点成功和失败分支流转必填', 'No resources exist': '不存在资源', 'Please delete all non-existing resources': '请删除所有不存在资源', 'Unauthorized or deleted resources': '未授权或已删除资源', 'Please delete all non-existent resources': '请删除所有未授权或已删除资源', Kinship: '工作流关系', Reset: '重置', KinshipStateActive: '当前选择', KinshipState1: '已上线', KinshipState0: '工作流未上线', KinshipState10: '调度未上线', 'Dag label display control': 'Dag节点名称显隐', Enable: '启用', Disable: '停用', 'The Worker group no longer exists, please select the correct Worker group!': 
'该Worker分组已经不存在,请选择正确的Worker分组!', 'Please confirm whether the workflow has been saved before downloading': '下载前请确定工作流是否已保存', 'User name length is between 3 and 39': '用户名长度在3~39之间', 'Timeout Settings': '超时设置', 'Connect Timeout': '连接超时', 'Socket Timeout': 'Socket超时', 'Connect timeout be a positive integer': '连接超时必须为数字', 'Socket Timeout be a positive integer': 'Socket超时必须为数字', ms: '毫秒', 'Please Enter Url': '请直接填写地址,例如:127.0.0.1:7077', Master: 'Master', 'Please select the waterdrop resources': '请选择waterdrop配置文件', zkDirectory: 'zk注册目录', 'Directory detail': '查看目录详情', 'Connection name': '连线名', 'Current connection settings': '当前连线设置', 'Please save the DAG before formatting': '格式化前请先保存DAG', 'Batch copy': '批量复制', 'Related items': '关联项目', 'Project name is required': '项目名称必填', 'Batch move': '批量移动', Version: '版本', 'Pre tasks': '前置任务', 'Running Memory': '运行内存', 'Max Memory': '最大内存', 'Min Memory': '最小内存', 'The workflow canvas is abnormal and cannot be saved, please recreate': '该工作流画布异常,无法保存,请重新创建', Info: '提示', 'Datasource userName': '所属用户', 'Resource userName': '所属用户' }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,737
[Bug][Datasource] datsource other param check error
When adding a MySQL datasource, I want to set a connection param with `serverTimezone=Asia/Shanghai`, so I add an "other" param in JSON format `{"serverTimezone":"Asia/Shanghai"}`, but the check rule stops the param from working: the `Pattern PARAMS_PATTER = Pattern.compile("^[a-zA-Z0-9]+$");` makes '/' illegal.
https://github.com/apache/dolphinscheduler/issues/5737
https://github.com/apache/dolphinscheduler/pull/5835
9ae2266cd40071db86cd02da829e39529f74fbeb
2df6ee1efbe4aec0f5579315a1b19e247f4115a6
"2021-07-02T06:02:42Z"
java
"2021-07-18T13:46:58Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/AbstractDatasourceProcessor.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.common.datasource;

import org.apache.commons.collections4.MapUtils;

import java.util.Map;
import java.util.regex.Pattern;

/**
 * Common parameter validation shared by all concrete datasource processors.
 *
 * <p>Validates the host, database name and optional extra connection
 * parameters of a {@link BaseDataSourceParamDTO} before a datasource is
 * created or updated.
 */
public abstract class AbstractDatasourceProcessor implements DatasourceProcessor {

    private static final Pattern IPV4_PATTERN = Pattern.compile("^[a-zA-Z0-9\\_\\-\\.]+$");

    private static final Pattern IPV6_PATTERN = Pattern.compile("^[a-zA-Z0-9\\_\\-\\.\\:\\[\\]]+$");

    private static final Pattern DATABASE_PATTER = Pattern.compile("^[a-zA-Z0-9\\_\\-\\.]+$");

    // Besides alphanumerics, allow '-', '_', '/', '@' and '.' so that common JDBC
    // parameter values such as "serverTimezone=Asia/Shanghai" pass validation
    // (the previous ^[a-zA-Z0-9]+$ pattern rejected '/', see issue #5737).
    private static final Pattern PARAMS_PATTER = Pattern.compile("^[a-zA-Z0-9\\-\\_\\/\\@\\.]+$");

    @Override
    public void checkDatasourceParam(BaseDataSourceParamDTO baseDataSourceParamDTO) {
        checkHost(baseDataSourceParamDTO.getHost());
        checkDatasourcePatter(baseDataSourceParamDTO.getDatabase());
        checkOther(baseDataSourceParamDTO.getOther());
    }

    /**
     * Check the host is valid.
     *
     * <p>A host is accepted when it matches either the IPv4-style or the
     * IPv6-style pattern.
     *
     * @param host datasource host
     * @throws IllegalArgumentException if the host contains illegal characters
     */
    protected void checkHost(String host) {
        // Use && (not ||): requiring a host to match BOTH patterns would reject
        // every bracketed IPv6 address, because '[', ']' and ':' are not part
        // of IPV4_PATTERN. Matching either pattern is sufficient.
        if (!IPV4_PATTERN.matcher(host).matches() && !IPV6_PATTERN.matcher(host).matches()) {
            throw new IllegalArgumentException("datasource host illegal");
        }
    }

    /**
     * Check the database name is valid.
     *
     * @param database database name
     * @throws IllegalArgumentException if the name contains illegal characters
     */
    protected void checkDatasourcePatter(String database) {
        if (!DATABASE_PATTER.matcher(database).matches()) {
            throw new IllegalArgumentException("datasource name illegal");
        }
    }

    /**
     * Check every value of the optional extra connection parameters is valid.
     *
     * @param other extra connection parameters; may be null or empty
     * @throws IllegalArgumentException if any parameter value contains illegal characters
     */
    protected void checkOther(Map<String, String> other) {
        if (MapUtils.isEmpty(other)) {
            return;
        }
        boolean paramsCheck = other.entrySet().stream().allMatch(p -> PARAMS_PATTER.matcher(p.getValue()).matches());
        if (!paramsCheck) {
            throw new IllegalArgumentException("datasource other params illegal");
        }
    }
}
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,737
[Bug][Datasource] datsource other param check error
When adding a MySQL datasource, I want to set a connection param with `serverTimezone=Asia/Shanghai`, so I add an "other" param in JSON format `{"serverTimezone":"Asia/Shanghai"}`, but the check rule stops the param from working: the `Pattern PARAMS_PATTER = Pattern.compile("^[a-zA-Z0-9]+$");` makes '/' illegal.
https://github.com/apache/dolphinscheduler/issues/5737
https://github.com/apache/dolphinscheduler/pull/5835
9ae2266cd40071db86cd02da829e39529f74fbeb
2df6ee1efbe4aec0f5579315a1b19e247f4115a6
"2021-07-02T06:02:42Z"
java
"2021-07-18T13:46:58Z"
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/DatasourceUtilTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.common.datasource; import org.apache.dolphinscheduler.common.datasource.mysql.MysqlConnectionParam; import org.apache.dolphinscheduler.common.datasource.mysql.MysqlDatasourceParamDTO; import org.apache.dolphinscheduler.common.datasource.mysql.MysqlDatasourceProcessor; import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.utils.JSONUtils; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mockito; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) @PrepareForTest({Class.class, DriverManager.class, MysqlDatasourceProcessor.class}) public class DatasourceUtilTest { @Test public void testCheckDatasourceParam() { MysqlDatasourceParamDTO mysqlDatasourceParamDTO = new MysqlDatasourceParamDTO(); mysqlDatasourceParamDTO.setHost("localhost"); mysqlDatasourceParamDTO.setDatabase("default"); mysqlDatasourceParamDTO.setOther(null); 
DatasourceUtil.checkDatasourceParam(mysqlDatasourceParamDTO); Assert.assertTrue(true); } @Test public void testBuildConnectionParams() { MysqlDatasourceParamDTO mysqlDatasourceParamDTO = new MysqlDatasourceParamDTO(); mysqlDatasourceParamDTO.setHost("localhost"); mysqlDatasourceParamDTO.setDatabase("default"); mysqlDatasourceParamDTO.setUserName("root"); mysqlDatasourceParamDTO.setPort(3306); mysqlDatasourceParamDTO.setPassword("123456"); ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(mysqlDatasourceParamDTO); Assert.assertNotNull(connectionParam); } @Test public void testBuildConnectionParams2() { MysqlDatasourceParamDTO mysqlDatasourceParamDTO = new MysqlDatasourceParamDTO(); mysqlDatasourceParamDTO.setHost("localhost"); mysqlDatasourceParamDTO.setDatabase("default"); mysqlDatasourceParamDTO.setUserName("root"); mysqlDatasourceParamDTO.setPort(3306); mysqlDatasourceParamDTO.setPassword("123456"); ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(DbType.MYSQL, JSONUtils.toJsonString(mysqlDatasourceParamDTO)); Assert.assertNotNull(connectionParam); } @Test public void testGetConnection() throws ClassNotFoundException, SQLException { PowerMockito.mockStatic(Class.class); PowerMockito.when(Class.forName(Mockito.any())).thenReturn(null); PowerMockito.mockStatic(DriverManager.class); PowerMockito.when(DriverManager.getConnection(Mockito.any(), Mockito.any(), Mockito.any())).thenReturn(null); MysqlConnectionParam connectionParam = new MysqlConnectionParam(); connectionParam.setUser("root"); connectionParam.setPassword("123456"); Connection connection = DatasourceUtil.getConnection(DbType.MYSQL, connectionParam); Assert.assertNull(connection); } @Test public void testGetJdbcUrl() { MysqlConnectionParam mysqlConnectionParam = new MysqlConnectionParam(); mysqlConnectionParam.setJdbcUrl("jdbc:mysql://localhost:3308"); String jdbcUrl = DatasourceUtil.getJdbcUrl(DbType.MYSQL, mysqlConnectionParam); 
Assert.assertEquals("jdbc:mysql://localhost:3308?allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false", jdbcUrl); } @Test public void testBuildDatasourceParamDTO() { MysqlConnectionParam connectionParam = new MysqlConnectionParam(); connectionParam.setJdbcUrl("jdbc:mysql://localhost:3308?allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false"); connectionParam.setAddress("jdbc:mysql://localhost:3308"); connectionParam.setUser("root"); connectionParam.setPassword("123456"); Assert.assertNotNull(DatasourceUtil.buildDatasourceParamDTO(DbType.MYSQL, JSONUtils.toJsonString(connectionParam))); } @Test public void testGetDatasourceProcessor() { Assert.assertNotNull(DatasourceUtil.getDatasourceProcessor(DbType.MYSQL)); Assert.assertNotNull(DatasourceUtil.getDatasourceProcessor(DbType.POSTGRESQL)); Assert.assertNotNull(DatasourceUtil.getDatasourceProcessor(DbType.HIVE)); Assert.assertNotNull(DatasourceUtil.getDatasourceProcessor(DbType.SPARK)); Assert.assertNotNull(DatasourceUtil.getDatasourceProcessor(DbType.CLICKHOUSE)); Assert.assertNotNull(DatasourceUtil.getDatasourceProcessor(DbType.ORACLE)); Assert.assertNotNull(DatasourceUtil.getDatasourceProcessor(DbType.SQLSERVER)); Assert.assertNotNull(DatasourceUtil.getDatasourceProcessor(DbType.DB2)); Assert.assertNotNull(DatasourceUtil.getDatasourceProcessor(DbType.PRESTO)); } @Test(expected = Exception.class) public void testGetDatasourceProcessorError() { DatasourceUtil.getDatasourceProcessor(null); } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,517
[Feature][JsonSplit-api]WorkFlowLineage interface
from #5498 Change the request parameter projectId to projectCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5517
https://github.com/apache/dolphinscheduler/pull/5834
741d757dcb8c80c97c32d314c0fb5da08ebad5ea
901bc9a43cc3f6f48a9681b1884a714c143759f1
"2021-05-18T14:02:08Z"
java
"2021-07-19T01:49:40Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageController.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.api.controller;

import static org.apache.dolphinscheduler.api.enums.Status.QUERY_WORKFLOW_LINEAGE_ERROR;
import static org.apache.dolphinscheduler.common.Constants.SESSION_USER;

import org.apache.dolphinscheduler.api.aspect.AccessLogAnnotation;
import org.apache.dolphinscheduler.api.service.WorkFlowLineageService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.entity.WorkFlowLineage;

import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;

import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import springfox.documentation.annotations.ApiIgnore;

/**
 * REST endpoints exposing workflow-lineage queries for one project.
 */
@Api(tags = "WORK_FLOW_LINEAGE_TAG")
@RestController
@RequestMapping("lineages/{projectId}")
public class WorkFlowLineageController extends BaseController {

    private static final Logger logger = LoggerFactory.getLogger(WorkFlowLineageController.class);

    @Autowired
    private WorkFlowLineageService workFlowLineageService;

    /**
     * Query workflow lineage entries whose name matches the given search value.
     *
     * @param loginUser session user (injected, ignored by swagger)
     * @param projectId project id taken from the request path
     * @param searchVal optional name filter
     * @return lineage list wrapped in a {@link Result}
     */
    @ApiOperation(value = "queryWorkFlowLineageByName", notes = "QUERY_WORKFLOW_LINEAGE_BY_NAME_NOTES")
    @GetMapping(value = "/list-name")
    @ResponseStatus(HttpStatus.OK)
    @AccessLogAnnotation(ignoreRequestArgs = "loginUser")
    public Result<List<WorkFlowLineage>> queryWorkFlowLineageByName(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser,
                                                                    @ApiParam(name = "projectId", value = "PROJECT_ID", required = true, example = "1") @PathVariable int projectId,
                                                                    @ApiIgnore @RequestParam(value = "searchVal", required = false) String searchVal) {
        try {
            // Escape user-supplied input before handing it to the service layer.
            String escapedSearchVal = ParameterUtils.handleEscapes(searchVal);
            Map<String, Object> serviceResult = workFlowLineageService.queryWorkFlowLineageByName(escapedSearchVal, projectId);
            return returnDataList(serviceResult);
        } catch (Exception e) {
            logger.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(), e);
            return error(QUERY_WORKFLOW_LINEAGE_ERROR.getCode(), QUERY_WORKFLOW_LINEAGE_ERROR.getMsg());
        }
    }

    /**
     * Query workflow lineage for a comma-separated list of workflow ids.
     *
     * @param loginUser session user (injected, ignored by swagger)
     * @param projectId project id taken from the request path
     * @param ids comma-separated workflow ids, may be null
     * @return lineage graph data wrapped in a {@link Result}
     */
    @ApiOperation(value = "queryWorkFlowLineageByIds", notes = "QUERY_WORKFLOW_LINEAGE_BY_IDS_NOTES")
    @GetMapping(value = "/list-ids")
    @ResponseStatus(HttpStatus.OK)
    @AccessLogAnnotation(ignoreRequestArgs = "loginUser")
    public Result<Map<String, Object>> queryWorkFlowLineageByIds(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser,
                                                                 @ApiParam(name = "projectId", value = "PROJECT_ID", required = true, example = "1") @PathVariable int projectId,
                                                                 @ApiIgnore @RequestParam(value = "ids", required = false) String ids) {
        try {
            Set<Integer> idSet = parseIds(ParameterUtils.handleEscapes(ids));
            Map<String, Object> serviceResult = workFlowLineageService.queryWorkFlowLineageByIds(idSet, projectId);
            return returnDataList(serviceResult);
        } catch (Exception e) {
            logger.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(), e);
            return error(QUERY_WORKFLOW_LINEAGE_ERROR.getCode(), QUERY_WORKFLOW_LINEAGE_ERROR.getMsg());
        }
    }

    /**
     * Split a comma-separated id string into a set of integers.
     * A null input yields an empty set; a malformed number propagates
     * NumberFormatException to the caller's catch block, as before.
     */
    private static Set<Integer> parseIds(String ids) {
        Set<Integer> idSet = new HashSet<>();
        if (ids == null) {
            return idSet;
        }
        for (String idStr : ids.split(",")) {
            idSet.add(Integer.parseInt(idStr));
        }
        return idSet;
    }
}
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,517
[Feature][JsonSplit-api]WorkFlowLineage interface
from #5498 Change the request parameter projectId to projectCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5517
https://github.com/apache/dolphinscheduler/pull/5834
741d757dcb8c80c97c32d314c0fb5da08ebad5ea
901bc9a43cc3f6f48a9681b1884a714c143759f1
"2021-05-18T14:02:08Z"
java
"2021-07-19T01:49:40Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkFlowLineageService.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service; import java.util.Map; import java.util.Set; /** * work flow lineage service */ public interface WorkFlowLineageService { Map<String, Object> queryWorkFlowLineageByName(String workFlowName, int projectId); Map<String, Object> queryWorkFlowLineageByIds(Set<Integer> ids,int projectId); }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,517
[Feature][JsonSplit-api]WorkFlowLineage interface
from #5498 Change the request parameter projectId to projectCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5517
https://github.com/apache/dolphinscheduler/pull/5834
741d757dcb8c80c97c32d314c0fb5da08ebad5ea
901bc9a43cc3f6f48a9681b1884a714c143759f1
"2021-05-18T14:02:08Z"
java
"2021-07-19T01:49:40Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkFlowLineageServiceImpl.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.api.service.impl;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.WorkFlowLineageService;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessLineage;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.WorkFlowLineage;
import org.apache.dolphinscheduler.dao.entity.WorkFlowRelation;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.WorkFlowLineageMapper;

import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

/**
 * Workflow lineage service implementation.
 *
 * <p>Resolves workflow-to-workflow relations by walking the task-level
 * lineage rows ({@link ProcessLineage}) stored by the mappers.
 */
@Service
public class WorkFlowLineageServiceImpl extends BaseServiceImpl implements WorkFlowLineageService {

    @Autowired
    private WorkFlowLineageMapper workFlowLineageMapper;

    @Autowired
    private ProjectMapper projectMapper;

    /**
     * Query lineage entries by workflow name within one project.
     * The project id is translated to the project code the mapper expects.
     */
    @Override
    public Map<String, Object> queryWorkFlowLineageByName(String workFlowName, int projectId) {
        // NOTE(review): assumes selectById always finds the project — a stale or
        // bogus projectId would NPE on project.getCode(); TODO confirm upstream guard.
        Project project = projectMapper.selectById(projectId);
        Map<String, Object> result = new HashMap<>();
        List<WorkFlowLineage> workFlowLineageList = workFlowLineageMapper.queryByName(workFlowName, project.getCode());
        result.put(Constants.DATA_LIST, workFlowLineageList);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * For one task-level lineage row, find the downstream ("post") workflow and
     * all upstream ("pre") workflows, then record them in the accumulators:
     * {@code workFlowLineageMap} collects each workflow keyed by its id with a
     * comma-joined list of source workflow ids, and {@code workFlowRelations}
     * collects the directed edges (0 is used as the source for root workflows).
     */
    private void getRelation(Map<Integer, WorkFlowLineage> workFlowLineageMap,
                             Set<WorkFlowRelation> workFlowRelations,
                             ProcessLineage processLineage) {
        // Rows whose post-task matches this lineage entry: non-empty means the
        // entry participates in a relation we must record.
        List<ProcessLineage> relations = workFlowLineageMapper.queryCodeRelation(
                processLineage.getPostTaskCode(), processLineage.getPostTaskVersion(),
                processLineage.getProcessDefinitionCode(), processLineage.getProjectCode());
        if (!relations.isEmpty()) {
            Set<Integer> preWorkFlowIds = new HashSet<>();
            // Rows whose pre-task matches: each one identifies an upstream workflow.
            List<ProcessLineage> preRelations = workFlowLineageMapper.queryCodeRelation(
                    processLineage.getPreTaskCode(), processLineage.getPreTaskVersion(),
                    processLineage.getProcessDefinitionCode(), processLineage.getProjectCode());
            for (ProcessLineage preRelation : preRelations) {
                WorkFlowLineage pre = workFlowLineageMapper.queryWorkFlowLineageByCode(
                        preRelation.getProcessDefinitionCode(), preRelation.getProjectCode());
                preWorkFlowIds.add(pre.getWorkFlowId());
            }
            // Only the first post relation is used to identify the downstream workflow.
            ProcessLineage postRelation = relations.get(0);
            WorkFlowLineage post = workFlowLineageMapper.queryWorkFlowLineageByCode(
                    postRelation.getProcessDefinitionCode(), postRelation.getProjectCode());
            if (!workFlowLineageMap.containsKey(post.getWorkFlowId())) {
                // First sighting of this workflow: store it with its sources.
                post.setSourceWorkFlowId(StringUtils.join(preWorkFlowIds, ","));
                workFlowLineageMap.put(post.getWorkFlowId(), post);
            } else {
                // Already seen: append the newly-found sources to the existing list.
                WorkFlowLineage workFlowLineage = workFlowLineageMap.get(post.getWorkFlowId());
                String sourceWorkFlowId = workFlowLineage.getSourceWorkFlowId();
                // NOTE(review): assumes sourceWorkFlowId is never null here (set on
                // first insertion above) — TODO confirm no other writer leaves it null.
                if (sourceWorkFlowId.equals("")) {
                    workFlowLineage.setSourceWorkFlowId(StringUtils.join(preWorkFlowIds, ","));
                } else {
                    if (!preWorkFlowIds.isEmpty()) {
                        workFlowLineage.setSourceWorkFlowId(sourceWorkFlowId + "," + StringUtils.join(preWorkFlowIds, ","));
                    }
                }
            }
            if (preWorkFlowIds.isEmpty()) {
                // No upstream workflow: record an edge from the synthetic root id 0.
                workFlowRelations.add(new WorkFlowRelation(0, post.getWorkFlowId()));
            } else {
                for (Integer workFlowId : preWorkFlowIds) {
                    workFlowRelations.add(new WorkFlowRelation(workFlowId, post.getWorkFlowId()));
                }
            }
        }
    }

    /**
     * Build the lineage graph for the given workflow ids: the workflow list and
     * the relation (edge) list, both placed under the data-list key.
     */
    @Override
    public Map<String, Object> queryWorkFlowLineageByIds(Set<Integer> ids, int projectId) {
        Map<String, Object> result = new HashMap<>();
        // NOTE(review): same unchecked selectById as above — NPE risk on a missing project.
        Project project = projectMapper.selectById(projectId);
        List<ProcessLineage> processLineages = workFlowLineageMapper.queryRelationByIds(ids, project.getCode());
        Map<Integer, WorkFlowLineage> workFlowLineages = new HashMap<>();
        Set<WorkFlowRelation> workFlowRelations = new HashSet<>();
        for (ProcessLineage processLineage : processLineages) {
            getRelation(workFlowLineages, workFlowRelations, processLineage);
        }
        Map<String, Object> workFlowLists = new HashMap<>();
        workFlowLists.put(Constants.WORKFLOW_LIST, workFlowLineages.values());
        workFlowLists.put(Constants.WORKFLOW_RELATION_LIST, workFlowRelations);
        result.put(Constants.DATA_LIST, workFlowLists);
        putMsg(result, Status.SUCCESS);
        return result;
    }
}
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,517
[Feature][JsonSplit-api]WorkFlowLineage interface
from #5498 Change the request parameter projectId to projectCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5517
https://github.com/apache/dolphinscheduler/pull/5834
741d757dcb8c80c97c32d314c0fb5da08ebad5ea
901bc9a43cc3f6f48a9681b1884a714c143759f1
"2021-05-18T14:02:08Z"
java
"2021-07-19T01:49:40Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageControllerTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.api.controller;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.WorkFlowLineageServiceImpl;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;

import java.text.MessageFormat;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.springframework.context.ApplicationContext;

/**
 * Work flow lineage controller test.
 *
 * <p>The controller under test gets a mocked service via {@code @InjectMocks};
 * the Spring application context is replaced by a Mockito mock so that
 * {@code ProjectMapper} lookups resolve to a canned project (id 1, code 1).
 */
public class WorkFlowLineageControllerTest extends AbstractControllerTest {

    @InjectMocks
    private WorkFlowLineageController workFlowLineageController;

    @Mock
    private WorkFlowLineageServiceImpl workFlowLineageService;

    @Before
    public void init() {
        // Install a mocked ApplicationContext into the static holder so any
        // bean lookup done inside the controller path returns our mocks.
        ApplicationContext applicationContext = Mockito.mock(ApplicationContext.class);
        SpringApplicationContext springApplicationContext = new SpringApplicationContext();
        springApplicationContext.setApplicationContext(applicationContext);
        ProjectMapper projectMapper = Mockito.mock(ProjectMapper.class);
        Mockito.when(applicationContext.getBean(ProjectMapper.class)).thenReturn(projectMapper);
        // Canned project returned for id 1 in every test below.
        Project project = new Project();
        project.setId(1);
        project.setCode(1L);
        Mockito.when(projectMapper.selectById(1)).thenReturn(project);
    }

    /** queryWorkFlowLineageByName should pass the search value through and return SUCCESS. */
    @Test
    public void testQueryWorkFlowLineageByName() {
        int projectId = 1;
        String searchVal = "test";
        Map<String, Object> result = new HashMap<>();
        putMsg(result, Status.SUCCESS);
        result.put(Constants.DATA_LIST, 1);
        Mockito.when(workFlowLineageService.queryWorkFlowLineageByName(searchVal, projectId)).thenReturn(result);
        Result response = workFlowLineageController.queryWorkFlowLineageByName(user, projectId, searchVal);
        Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
    }

    /** queryWorkFlowLineageByIds should parse "1" into {1} and return SUCCESS. */
    @Test
    public void testQueryWorkFlowLineageByIds() {
        int projectId = 1;
        String ids = "1";
        Map<String, Object> result = new HashMap<>();
        putMsg(result, Status.SUCCESS);
        result.put(Constants.DATA_LIST, 1);
        Set<Integer> idSet = new HashSet<>();
        idSet.add(1);
        Mockito.when(workFlowLineageService.queryWorkFlowLineageByIds(idSet, projectId)).thenReturn(result);
        Result response = workFlowLineageController.queryWorkFlowLineageByIds(user, projectId, ids);
        Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
    }
}
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,517
[Feature][JsonSplit-api]WorkFlowLineage interface
from #5498 Change the request parameter projectId to projectCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5517
https://github.com/apache/dolphinscheduler/pull/5834
741d757dcb8c80c97c32d314c0fb5da08ebad5ea
901bc9a43cc3f6f48a9681b1884a714c143759f1
"2021-05-18T14:02:08Z"
java
"2021-07-19T01:49:40Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkFlowLineageServiceTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service; import static org.mockito.Mockito.when; import org.apache.dolphinscheduler.api.service.impl.WorkFlowLineageServiceImpl; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.dao.entity.ProcessLineage; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.WorkFlowLineage; import org.apache.dolphinscheduler.dao.entity.WorkFlowRelation; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.WorkFlowLineageMapper; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; /** * work flow lineage service test */ @RunWith(MockitoJUnitRunner.class) public class WorkFlowLineageServiceTest { @InjectMocks private WorkFlowLineageServiceImpl workFlowLineageService; @Mock private WorkFlowLineageMapper workFlowLineageMapper; @Mock private 
ProjectMapper projectMapper; /** * get mock Project * * @param projectName projectName * @return Project */ private Project getProject(String projectName) { Project project = new Project(); project.setCode(1L); project.setId(1); project.setName(projectName); project.setUserId(1); return project; } @Test public void testQueryWorkFlowLineageByName() { Project project = getProject("test"); String searchVal = "test"; when(projectMapper.selectById(1)).thenReturn(project); when(workFlowLineageMapper.queryByName(Mockito.any(), Mockito.any())).thenReturn(getWorkFlowLineages()); Map<String, Object> result = workFlowLineageService.queryWorkFlowLineageByName(searchVal, 1); List<WorkFlowLineage> workFlowLineageList = (List<WorkFlowLineage>) result.get(Constants.DATA_LIST); Assert.assertTrue(workFlowLineageList.size() > 0); } @Test public void testQueryWorkFlowLineageByIds() { Set<Integer> ids = new HashSet<>(); ids.add(1); ids.add(2); Project project = getProject("test"); List<ProcessLineage> processLineages = new ArrayList<>(); ProcessLineage processLineage = new ProcessLineage(); processLineage.setPreTaskVersion(1); processLineage.setPreTaskCode(1L); processLineage.setPostTaskCode(2L); processLineage.setPostTaskVersion(1); processLineage.setProcessDefinitionCode(1111L); processLineage.setProcessDefinitionVersion(1); processLineage.setProjectCode(1111L); processLineages.add(processLineage); WorkFlowLineage workFlowLineage = new WorkFlowLineage(); workFlowLineage.setSourceWorkFlowId(""); when(projectMapper.selectById(1)).thenReturn(project); when(workFlowLineageMapper.queryRelationByIds(ids, project.getCode())).thenReturn(processLineages); when(workFlowLineageMapper.queryCodeRelation(processLineage.getPostTaskCode() , processLineage.getPreTaskVersion() , processLineage.getProcessDefinitionCode() , processLineage.getProjectCode())) .thenReturn(processLineages); when(workFlowLineageMapper .queryWorkFlowLineageByCode(processLineage.getProcessDefinitionCode(), 
processLineage.getProjectCode())) .thenReturn(workFlowLineage); Map<String, Object> result = workFlowLineageService.queryWorkFlowLineageByIds(ids, 1); Map<String, Object> workFlowLists = (Map<String, Object>) result.get(Constants.DATA_LIST); Collection<WorkFlowLineage> workFlowLineages = (Collection<WorkFlowLineage>) workFlowLists.get(Constants.WORKFLOW_LIST); Set<WorkFlowRelation> workFlowRelations = (Set<WorkFlowRelation>) workFlowLists.get(Constants.WORKFLOW_RELATION_LIST); Assert.assertTrue(workFlowLineages.size() > 0); Assert.assertTrue(workFlowRelations.size() > 0); } private List<WorkFlowLineage> getWorkFlowLineages() { List<WorkFlowLineage> workFlowLineages = new ArrayList<>(); WorkFlowLineage workFlowLineage = new WorkFlowLineage(); workFlowLineage.setWorkFlowId(1); workFlowLineage.setWorkFlowName("testdag"); workFlowLineages.add(workFlowLineage); return workFlowLineages; } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,517
[Feature][JsonSplit-api]WorkFlowLineage interface
from #5498 Change the request parameter projectId to projectCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5517
https://github.com/apache/dolphinscheduler/pull/5834
741d757dcb8c80c97c32d314c0fb5da08ebad5ea
901bc9a43cc3f6f48a9681b1884a714c143759f1
"2021-05-18T14:02:08Z"
java
"2021-07-19T01:49:40Z"
dolphinscheduler-ui/src/js/conf/home/store/kinship/actions.js
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import _ from 'lodash' import io from '@/module/io' import localStore from '@/module/util/localStorage' export default { /** * Get workFlow DAG */ getWorkFlowList ({ state }, payload) { const projectId = localStore.getItem('projectId') return new Promise((resolve, reject) => { const url = `lineages/${projectId}/list-name` io.get(url, { searchVal: payload }, res => { const workList = [] if (res.data) { _.map(res.data, (item) => { workList.push({ id: `${item.workFlowId}`, name: item.workFlowName }) }) } state.workList = workList /* JSON.parse(connects) */ resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get workFlow DAG */ getWorkFlowDAG ({ state }, payload) { const projectId = localStore.getItem('projectId') return new Promise((resolve, reject) => { const url = `lineages/${projectId}/list-ids` io.get(url, { ids: payload }, res => { let locations = [] let connects = [] if (res.data.workFlowList) { locations = _.uniqBy(res.data.workFlowList, 'workFlowId').map((item) => ({ id: `${item.workFlowId}`, name: item.workFlowName, workFlowPublishStatus: item.workFlowPublishStatus, scheduleStartTime: item.scheduleStartTime, scheduleEndTime: item.scheduleEndTime, crontab: item.crontab, 
schedulePublishStatus: item.schedulePublishStatus })) } if (res.data.workFlowRelationList) { connects = _.map(res.data.workFlowRelationList, (item) => ({ source: `${item.sourceWorkFlowId}`, // should be string, or connects will not show by echarts target: `${item.targetWorkFlowId}` // should be string, or connects will not show by echarts })) } state.sourceWorkFlowId = payload || '' // locations state.locations = locations /* JSON.parse(locations) */ // connects state.connects = connects /* JSON.parse(connects) */ resolve(res.data) }).catch(res => { reject(res) }) }) } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,719
[Bug][K8s] Ingress ERROR io.k8s.api.networking.v1beta1.IngressSpec.tls: got "map", expected "array" On TLS enabled
Ingress ERROR io.k8s.api.networking.v1beta1.IngressSpec.tls: got "map", expected "array" On TLS enabled **Expected behavior** Bug fixed **Which version of Dolphin Scheduler:** -[dev] -[1.3.6] **Additional context** Add any other context about the problem here. **Requirement or improvement** - Please describe about your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/5719
https://github.com/apache/dolphinscheduler/pull/5718
2df6ee1efbe4aec0f5579315a1b19e247f4115a6
5e343d3fd21ceb2bf141e7b70524b5b3eb5fb87b
"2021-06-29T14:42:43Z"
java
"2021-07-20T03:21:34Z"
docker/kubernetes/dolphinscheduler/templates/ingress.yaml
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # {{- if .Values.ingress.enabled }} {{- if .Capabilities.APIVersions.Has "networking.k8s.io/v1/Ingress" }} apiVersion: networking.k8s.io/v1 {{- else if .Capabilities.APIVersions.Has "networking.k8s.io/v1beta1/Ingress" }} apiVersion: networking.k8s.io/v1beta1 {{- else }} apiVersion: extensions/v1beta1 {{- end }} kind: Ingress metadata: name: {{ include "dolphinscheduler.fullname" . }} labels: app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }} {{- include "dolphinscheduler.common.labels" . | nindent 4 }} spec: rules: - host: {{ .Values.ingress.host }} http: paths: - path: {{ .Values.ingress.path }} backend: {{- if .Capabilities.APIVersions.Has "networking.k8s.io/v1/Ingress" }} service: name: {{ include "dolphinscheduler.fullname" . }}-api port: name: api-port {{- else }} serviceName: {{ include "dolphinscheduler.fullname" . }}-api servicePort: api-port {{- end }} {{- if .Capabilities.APIVersions.Has "networking.k8s.io/v1/Ingress" }} pathType: Prefix {{- end }} {{- if .Values.ingress.tls.enabled }} tls: hosts: - {{ .Values.ingress.host }} secretName: {{ .Values.ingress.tls.secretName }} {{- end }} {{- end }}
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,519
[Feature][JsonSplit-api]executors interface
from #5498 Change the request parameter processDefinitionId to processDefinitionCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5519
https://github.com/apache/dolphinscheduler/pull/5863
8f0c400ee094e9f93fd74e9d09f6258903f56d91
c5bc4fc48e67d3e8e1b40157403c8c4017ffae57
"2021-05-18T14:03:18Z"
java
"2021-07-20T09:22:10Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.controller; import static org.apache.dolphinscheduler.api.enums.Status.CHECK_PROCESS_DEFINITION_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.EXECUTE_PROCESS_INSTANCE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.START_PROCESS_INSTANCE_ERROR; import org.apache.dolphinscheduler.api.aspect.AccessLogAnnotation; import org.apache.dolphinscheduler.api.enums.ExecuteType; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.ExecutorService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.RunMode; import org.apache.dolphinscheduler.common.enums.TaskDependType; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.User; import java.util.Map; import 
org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestAttribute; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; import io.swagger.annotations.Api; import io.swagger.annotations.ApiImplicitParam; import io.swagger.annotations.ApiImplicitParams; import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiParam; import springfox.documentation.annotations.ApiIgnore; /** * executor controller */ @Api(tags = "EXECUTOR_TAG") @RestController @RequestMapping("projects/{projectName}/executors") public class ExecutorController extends BaseController { @Autowired private ExecutorService execService; /** * execute process instance * * @param loginUser login user * @param projectName project name * @param processDefinitionId process definition id * @param scheduleTime schedule time * @param failureStrategy failure strategy * @param startNodeList start nodes list * @param taskDependType task depend type * @param execType execute type * @param warningType warning type * @param warningGroupId warning group id * @param runMode run mode * @param processInstancePriority process instance priority * @param workerGroup worker group * @param timeout timeout * @return start process result code */ @ApiOperation(value = "startProcessInstance", notes = "RUN_PROCESS_INSTANCE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "scheduleTime", value = "SCHEDULE_TIME", required = true, dataType = "String"), @ApiImplicitParam(name 
= "failureStrategy", value = "FAILURE_STRATEGY", required = true, dataType = "FailureStrategy"), @ApiImplicitParam(name = "startNodeList", value = "START_NODE_LIST", dataType = "String"), @ApiImplicitParam(name = "taskDependType", value = "TASK_DEPEND_TYPE", dataType = "TaskDependType"), @ApiImplicitParam(name = "execType", value = "COMMAND_TYPE", dataType = "CommandType"), @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", required = true, dataType = "WarningType"), @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "runMode", value = "RUN_MODE", dataType = "RunMode"), @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", required = true, dataType = "Priority"), @ApiImplicitParam(name = "workerGroup", value = "WORKER_GROUP", dataType = "String", example = "default"), @ApiImplicitParam(name = "timeout", value = "TIMEOUT", dataType = "Int", example = "100"), }) @PostMapping(value = "start-process-instance") @ResponseStatus(HttpStatus.OK) @ApiException(START_PROCESS_INSTANCE_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result startProcessInstance(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam(value = "processDefinitionId") int processDefinitionId, @RequestParam(value = "scheduleTime", required = false) String scheduleTime, @RequestParam(value = "failureStrategy", required = true) FailureStrategy failureStrategy, @RequestParam(value = "startNodeList", required = false) String startNodeList, @RequestParam(value = "taskDependType", required = false) TaskDependType taskDependType, @RequestParam(value = "execType", required = false) CommandType execType, @RequestParam(value = "warningType", required = true) WarningType warningType, @RequestParam(value = 
"warningGroupId", required = false) int warningGroupId, @RequestParam(value = "runMode", required = false) RunMode runMode, @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority, @RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup, @RequestParam(value = "timeout", required = false) Integer timeout, @RequestParam(value = "startParams", required = false) String startParams) { if (timeout == null) { timeout = Constants.MAX_TASK_TIMEOUT; } Map<String, String> startParamMap = null; if (startParams != null) { startParamMap = JSONUtils.toMap(startParams); } Map<String, Object> result = execService.execProcessInstance(loginUser, projectName, processDefinitionId, scheduleTime, execType, failureStrategy, startNodeList, taskDependType, warningType, warningGroupId, runMode, processInstancePriority, workerGroup, timeout, startParamMap); return returnDataList(result); } /** * do action to process instance:pause, stop, repeat, recover from pause, recover from stop * * @param loginUser login user * @param projectName project name * @param processInstanceId process instance id * @param executeType execute type * @return execute result code */ @ApiOperation(value = "execute", notes = "EXECUTE_ACTION_TO_PROCESS_INSTANCE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "executeType", value = "EXECUTE_TYPE", required = true, dataType = "ExecuteType") }) @PostMapping(value = "/execute") @ResponseStatus(HttpStatus.OK) @ApiException(EXECUTE_PROCESS_INSTANCE_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result execute(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam("processInstanceId") Integer 
processInstanceId, @RequestParam("executeType") ExecuteType executeType ) { Map<String, Object> result = execService.execute(loginUser, projectName, processInstanceId, executeType); return returnDataList(result); } /** * check process definition and all of the son process definitions is on line. * * @param loginUser login user * @param processDefinitionId process definition id * @return check result code */ @ApiOperation(value = "startCheckProcessDefinition", notes = "START_CHECK_PROCESS_DEFINITION_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100") }) @PostMapping(value = "/start-check") @ResponseStatus(HttpStatus.OK) @ApiException(CHECK_PROCESS_DEFINITION_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result startCheckProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "processDefinitionId") int processDefinitionId) { Map<String, Object> result = execService.startCheckByProcessDefinedId(processDefinitionId); return returnDataList(result); } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,519
[Feature][JsonSplit-api]executors interface
from #5498 Change the request parameter processDefinitionId to processDefinitionCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5519
https://github.com/apache/dolphinscheduler/pull/5863
8f0c400ee094e9f93fd74e9d09f6258903f56d91
c5bc4fc48e67d3e8e1b40157403c8c4017ffae57
"2021-05-18T14:03:18Z"
java
"2021-07-20T09:22:10Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.ExecuteType; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.RunMode; import org.apache.dolphinscheduler.common.enums.TaskDependType; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.User; import java.util.Map; /** * executor service */ public interface ExecutorService { /** * execute process instance * * @param loginUser login user * @param projectName project name * @param processDefinitionId process Definition Id * @param cronTime cron time * @param commandType command type * @param failureStrategy failuer strategy * @param startNodeList start nodelist * @param taskDependType node dependency type * @param warningType warning type * @param warningGroupId notify group id * @param processInstancePriority process instance priority * @param workerGroup worker group name * @param runMode run mode * @param 
timeout timeout * @param startParams the global param values which pass to new process instance * @return execute process instance code */ Map<String, Object> execProcessInstance(User loginUser, String projectName, int processDefinitionId, String cronTime, CommandType commandType, FailureStrategy failureStrategy, String startNodeList, TaskDependType taskDependType, WarningType warningType, int warningGroupId, RunMode runMode, Priority processInstancePriority, String workerGroup, Integer timeout, Map<String, String> startParams); /** * check whether the process definition can be executed * * @param processDefinition process definition * @param processDefineCode process definition code * @return check result code */ Map<String, Object> checkProcessDefinitionValid(ProcessDefinition processDefinition, long processDefineCode); /** * do action to process instance:pause, stop, repeat, recover from pause, recover from stop * * @param loginUser login user * @param projectName project name * @param processInstanceId process instance id * @param executeType execute type * @return execute result code */ Map<String, Object> execute(User loginUser, String projectName, Integer processInstanceId, ExecuteType executeType); /** * check if sub processes are offline before starting process definition * * @param processDefineId process definition id * @return check result code */ Map<String, Object> startCheckByProcessDefinedId(int processDefineId); }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,519
[Feature][JsonSplit-api]executors interface
from #5498 Change the request parameter processDefinitionId to processDefinitionCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5519
https://github.com/apache/dolphinscheduler/pull/5863
8f0c400ee094e9f93fd74e9d09f6258903f56d91
c5bc4fc48e67d3e8e1b40157403c8c4017ffae57
"2021-05-18T14:03:18Z"
java
"2021-07-20T09:22:10Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service.impl; import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE; import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_NODE_NAMES; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_PARAMS; import static org.apache.dolphinscheduler.common.Constants.MAX_TASK_TIMEOUT; import org.apache.dolphinscheduler.api.enums.ExecuteType; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.ExecutorService; import org.apache.dolphinscheduler.api.service.MonitorService; import org.apache.dolphinscheduler.api.service.ProjectService; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; import 
org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.RunMode; import org.apache.dolphinscheduler.common.enums.TaskDependType; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.model.Server; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; import org.apache.commons.collections.MapUtils; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; /** * executor service impl */ @Service public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorService { private static final Logger logger = LoggerFactory.getLogger(ExecutorServiceImpl.class); @Autowired private ProjectMapper projectMapper; @Autowired private ProjectService projectService; @Autowired private ProcessDefinitionMapper 
processDefinitionMapper; @Autowired private MonitorService monitorService; @Autowired private ProcessInstanceMapper processInstanceMapper; @Autowired private ProcessService processService; /** * execute process instance * * @param loginUser login user * @param projectName project name * @param processDefinitionId process Definition Id * @param cronTime cron time * @param commandType command type * @param failureStrategy failuer strategy * @param startNodeList start nodelist * @param taskDependType node dependency type * @param warningType warning type * @param warningGroupId notify group id * @param processInstancePriority process instance priority * @param workerGroup worker group name * @param runMode run mode * @param timeout timeout * @param startParams the global param values which pass to new process instance * @return execute process instance code */ @Override public Map<String, Object> execProcessInstance(User loginUser, String projectName, int processDefinitionId, String cronTime, CommandType commandType, FailureStrategy failureStrategy, String startNodeList, TaskDependType taskDependType, WarningType warningType, int warningGroupId, RunMode runMode, Priority processInstancePriority, String workerGroup, Integer timeout, Map<String, String> startParams) { Map<String, Object> result = new HashMap<>(); // timeout is invalid if (timeout <= 0 || timeout > MAX_TASK_TIMEOUT) { putMsg(result, Status.TASK_TIMEOUT_PARAMS_ERROR); return result; } Project project = projectMapper.queryByName(projectName); Map<String, Object> checkResultAndAuth = checkResultAndAuth(loginUser, projectName, project); if (checkResultAndAuth != null) { return checkResultAndAuth; } // check process define release state ProcessDefinition processDefinition = processDefinitionMapper.selectById(processDefinitionId); result = checkProcessDefinitionValid(processDefinition, processDefinitionId); if (result.get(Constants.STATUS) != Status.SUCCESS) { return result; } if 
(!checkTenantSuitable(processDefinition)) { logger.error("there is not any valid tenant for the process definition: id:{},name:{}, ", processDefinition.getId(), processDefinition.getName()); putMsg(result, Status.TENANT_NOT_SUITABLE); return result; } // check master exists if (!checkMasterExists(result)) { return result; } /** * create command */ int create = this.createCommand(commandType, processDefinitionId, taskDependType, failureStrategy, startNodeList, cronTime, warningType, loginUser.getId(), warningGroupId, runMode, processInstancePriority, workerGroup, startParams); if (create > 0) { processDefinition.setWarningGroupId(warningGroupId); processDefinitionMapper.updateById(processDefinition); putMsg(result, Status.SUCCESS); } else { putMsg(result, Status.START_PROCESS_INSTANCE_ERROR); } return result; } /** * check whether master exists * * @param result result * @return master exists return true , otherwise return false */ private boolean checkMasterExists(Map<String, Object> result) { // check master server exists List<Server> masterServers = monitorService.getServerListFromRegistry(true); // no master if (masterServers.isEmpty()) { putMsg(result, Status.MASTER_NOT_EXISTS); return false; } return true; } /** * check whether the process definition can be executed * * @param processDefinition process definition * @param processDefineCode process definition code * @return check result code */ @Override public Map<String, Object> checkProcessDefinitionValid(ProcessDefinition processDefinition, long processDefineCode) { Map<String, Object> result = new HashMap<>(); if (processDefinition == null) { // check process definition exists putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineCode); } else if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { // check process definition online putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefineCode); } else { result.put(Constants.STATUS, Status.SUCCESS); } return result; } /** * do action 
to process instance:pause, stop, repeat, recover from pause, recover from stop * * @param loginUser login user * @param projectName project name * @param processInstanceId process instance id * @param executeType execute type * @return execute result code */ @Override public Map<String, Object> execute(User loginUser, String projectName, Integer processInstanceId, ExecuteType executeType) { Map<String, Object> result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); Map<String, Object> checkResult = checkResultAndAuth(loginUser, projectName, project); if (checkResult != null) { return checkResult; } // check master exists if (!checkMasterExists(result)) { return result; } ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId); if (processInstance == null) { putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); return result; } ProcessDefinition processDefinition = processService.findProcessDefinition(processInstance.getProcessDefinitionCode(), processInstance.getProcessDefinitionVersion()); if (executeType != ExecuteType.STOP && executeType != ExecuteType.PAUSE) { result = checkProcessDefinitionValid(processDefinition, processInstance.getProcessDefinitionCode()); if (result.get(Constants.STATUS) != Status.SUCCESS) { return result; } } checkResult = checkExecuteType(processInstance, executeType); Status status = (Status) checkResult.get(Constants.STATUS); if (status != Status.SUCCESS) { return checkResult; } if (!checkTenantSuitable(processDefinition)) { logger.error("there is not any valid tenant for the process definition: id:{},name:{}, ", processDefinition.getId(), processDefinition.getName()); putMsg(result, Status.TENANT_NOT_SUITABLE); } //get the startParams user specified at the first starting while repeat running is needed Map<String, Object> commandMap = JSONUtils.toMap(processInstance.getCommandParam(), String.class, Object.class); String startParams = null; if 
(MapUtils.isNotEmpty(commandMap) && executeType == ExecuteType.REPEAT_RUNNING) { Object startParamsJson = commandMap.get(Constants.CMD_PARAM_START_PARAMS); if (startParamsJson != null) { startParams = startParamsJson.toString(); } } switch (executeType) { case REPEAT_RUNNING: result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.REPEAT_RUNNING, startParams); break; case RECOVER_SUSPENDED_PROCESS: result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.RECOVER_SUSPENDED_PROCESS, startParams); break; case START_FAILURE_TASK_PROCESS: result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.START_FAILURE_TASK_PROCESS, startParams); break; case STOP: if (processInstance.getState() == ExecutionStatus.READY_STOP) { putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState()); } else { result = updateProcessInstancePrepare(processInstance, CommandType.STOP, ExecutionStatus.READY_STOP); } break; case PAUSE: if (processInstance.getState() == ExecutionStatus.READY_PAUSE) { putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState()); } else { result = updateProcessInstancePrepare(processInstance, CommandType.PAUSE, ExecutionStatus.READY_PAUSE); } break; default: logger.error("unknown execute type : {}", executeType); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "unknown execute type"); break; } return result; } /** * check tenant suitable * * @param processDefinition process definition * @return true if tenant suitable, otherwise return false */ private boolean checkTenantSuitable(ProcessDefinition processDefinition) { Tenant tenant = processService.getTenantForProcess(processDefinition.getTenantId(), processDefinition.getUserId()); return tenant != null; } /** * Check the state of process instance and the type of operation match * * @param processInstance 
process instance * @param executeType execute type * @return check result code */ private Map<String, Object> checkExecuteType(ProcessInstance processInstance, ExecuteType executeType) { Map<String, Object> result = new HashMap<>(); ExecutionStatus executionStatus = processInstance.getState(); boolean checkResult = false; switch (executeType) { case PAUSE: case STOP: if (executionStatus.typeIsRunning()) { checkResult = true; } break; case REPEAT_RUNNING: if (executionStatus.typeIsFinished()) { checkResult = true; } break; case START_FAILURE_TASK_PROCESS: if (executionStatus.typeIsFailure()) { checkResult = true; } break; case RECOVER_SUSPENDED_PROCESS: if (executionStatus.typeIsPause() || executionStatus.typeIsCancel()) { checkResult = true; } break; default: break; } if (!checkResult) { putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, processInstance.getName(), executionStatus.toString(), executeType.toString()); } else { putMsg(result, Status.SUCCESS); } return result; } /** * prepare to update process instance command type and status * * @param processInstance process instance * @param commandType command type * @param executionStatus execute status * @return update result */ private Map<String, Object> updateProcessInstancePrepare(ProcessInstance processInstance, CommandType commandType, ExecutionStatus executionStatus) { Map<String, Object> result = new HashMap<>(); processInstance.setCommandType(commandType); processInstance.addHistoryCmd(commandType); processInstance.setState(executionStatus); int update = processService.updateProcessInstance(processInstance); // determine whether the process is normal if (update > 0) { putMsg(result, Status.SUCCESS); } else { putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR); } return result; } /** * insert command, used in the implementation of the page, re run, recovery (pause / failure) execution * * @param loginUser login user * @param instanceId instance id * @param processDefinitionId process 
definition id * @param commandType command type * @return insert result code */ private Map<String, Object> insertCommand(User loginUser, Integer instanceId, Integer processDefinitionId, CommandType commandType, String startParams) { Map<String, Object> result = new HashMap<>(); //To add startParams only when repeat running is needed Map<String, Object> cmdParam = new HashMap<>(); cmdParam.put(CMD_PARAM_RECOVER_PROCESS_ID_STRING, instanceId); if (StringUtils.isNotEmpty(startParams)) { cmdParam.put(CMD_PARAM_START_PARAMS, startParams); } Command command = new Command(); command.setCommandType(commandType); command.setProcessDefinitionId(processDefinitionId); command.setCommandParam(JSONUtils.toJsonString(cmdParam)); command.setExecutorId(loginUser.getId()); if (!processService.verifyIsNeedCreateCommand(command)) { putMsg(result, Status.PROCESS_INSTANCE_EXECUTING_COMMAND, processDefinitionId); return result; } int create = processService.createCommand(command); if (create > 0) { putMsg(result, Status.SUCCESS); } else { putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR); } return result; } /** * check if sub processes are offline before starting process definition * * @param processDefineId process definition id * @return check result code */ @Override public Map<String, Object> startCheckByProcessDefinedId(int processDefineId) { Map<String, Object> result = new HashMap<>(); if (processDefineId == 0) { logger.error("process definition id is null"); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "process definition id"); } List<Integer> ids = new ArrayList<>(); processService.recurseFindSubProcessId(processDefineId, ids); Integer[] idArray = ids.toArray(new Integer[ids.size()]); if (!ids.isEmpty()) { List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(idArray); if (processDefinitionList != null) { for (ProcessDefinition processDefinition : processDefinitionList) { /** * if there is no online process, exit 
directly */ if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName()); logger.info("not release process definition id: {} , name : {}", processDefinition.getId(), processDefinition.getName()); return result; } } } } putMsg(result, Status.SUCCESS); return result; } /** * create command * * @param commandType commandType * @param processDefineId processDefineId * @param nodeDep nodeDep * @param failureStrategy failureStrategy * @param startNodeList startNodeList * @param schedule schedule * @param warningType warningType * @param executorId executorId * @param warningGroupId warningGroupId * @param runMode runMode * @param processInstancePriority processInstancePriority * @param workerGroup workerGroup * @return command id */ private int createCommand(CommandType commandType, int processDefineId, TaskDependType nodeDep, FailureStrategy failureStrategy, String startNodeList, String schedule, WarningType warningType, int executorId, int warningGroupId, RunMode runMode, Priority processInstancePriority, String workerGroup, Map<String, String> startParams) { /** * instantiate command schedule instance */ Command command = new Command(); Map<String, String> cmdParam = new HashMap<>(); if (commandType == null) { command.setCommandType(CommandType.START_PROCESS); } else { command.setCommandType(commandType); } command.setProcessDefinitionId(processDefineId); if (nodeDep != null) { command.setTaskDependType(nodeDep); } if (failureStrategy != null) { command.setFailureStrategy(failureStrategy); } if (StringUtils.isNotEmpty(startNodeList)) { cmdParam.put(CMD_PARAM_START_NODE_NAMES, startNodeList); } if (warningType != null) { command.setWarningType(warningType); } if (startParams != null && startParams.size() > 0) { cmdParam.put(CMD_PARAM_START_PARAMS, JSONUtils.toJsonString(startParams)); } command.setCommandParam(JSONUtils.toJsonString(cmdParam)); command.setExecutorId(executorId); 
command.setWarningGroupId(warningGroupId); command.setProcessInstancePriority(processInstancePriority); command.setWorkerGroup(workerGroup); Date start = null; Date end = null; if (StringUtils.isNotEmpty(schedule)) { String[] interval = schedule.split(","); if (interval.length == 2) { start = DateUtils.getScheduleDate(interval[0]); end = DateUtils.getScheduleDate(interval[1]); } } // determine whether to complement if (commandType == CommandType.COMPLEMENT_DATA) { runMode = (runMode == null) ? RunMode.RUN_MODE_SERIAL : runMode; if (null != start && null != end && !start.after(end)) { if (runMode == RunMode.RUN_MODE_SERIAL) { cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start)); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(end)); command.setCommandParam(JSONUtils.toJsonString(cmdParam)); return processService.createCommand(command); } else if (runMode == RunMode.RUN_MODE_PARALLEL) { List<Schedule> schedules = processService.queryReleaseSchedulerListByProcessDefinitionId(processDefineId); List<Date> listDate = new LinkedList<>(); if (!CollectionUtils.isEmpty(schedules)) { for (Schedule item : schedules) { listDate.addAll(CronUtils.getSelfFireDateList(start, end, item.getCrontab())); } } if (!CollectionUtils.isEmpty(listDate)) { // loop by schedule date for (Date date : listDate) { cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(date)); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(date)); command.setCommandParam(JSONUtils.toJsonString(cmdParam)); processService.createCommand(command); } return listDate.size(); } else { // loop by day int runCunt = 0; while (!start.after(end)) { runCunt += 1; cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start)); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(start)); command.setCommandParam(JSONUtils.toJsonString(cmdParam)); processService.createCommand(command); start = 
DateUtils.getSomeDay(start, 1); } return runCunt; } } } else { logger.error("there is not valid schedule date for the process definition: id:{}", processDefineId); } } else { command.setCommandParam(JSONUtils.toJsonString(cmdParam)); return processService.createCommand(command); } return 0; } /** * check result and auth */ private Map<String, Object> checkResultAndAuth(User loginUser, String projectName, Project project) { // check project auth Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); Status status = (Status) checkResult.get(Constants.STATUS); if (status != Status.SUCCESS) { return checkResult; } return null; } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,519
[Feature][JsonSplit-api]executors interface
from #5498 Change the request parameter processDefinitionId to processDefinitionCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5519
https://github.com/apache/dolphinscheduler/pull/5863
8f0c400ee094e9f93fd74e9d09f6258903f56d91
c5bc4fc48e67d3e8e1b40157403c8c4017ffae57
"2021-05-18T14:03:18Z"
java
"2021-07-20T09:22:10Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ExecutorControllerTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.controller; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import org.apache.dolphinscheduler.api.enums.ExecuteType; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.MediaType; import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; /** * executor controller test */ public class ExecutorControllerTest extends AbstractControllerTest { private static Logger logger = LoggerFactory.getLogger(ExecutorControllerTest.class); @Ignore @Test public void 
testStartProcessInstance() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("processDefinitionId", "40"); paramsMap.add("scheduleTime", ""); paramsMap.add("failureStrategy", String.valueOf(FailureStrategy.CONTINUE)); paramsMap.add("startNodeList", ""); paramsMap.add("taskDependType", ""); paramsMap.add("execType", ""); paramsMap.add("warningType", String.valueOf(WarningType.NONE)); paramsMap.add("warningGroupId", ""); paramsMap.add("receivers", ""); paramsMap.add("receiversCc", ""); paramsMap.add("runMode", ""); paramsMap.add("processInstancePriority", ""); paramsMap.add("workerGroupId", ""); paramsMap.add("timeout", ""); MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/executors/start-process-instance", "cxc_1113") .header("sessionId", sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Ignore @Test public void testExecute() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("processInstanceId", "40"); paramsMap.add("executeType", String.valueOf(ExecuteType.NONE)); MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/executors/execute", "cxc_1113") .header("sessionId", sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testStartCheckProcessDefinition() throws Exception 
{ MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/executors/start-check", "cxc_1113") .header(SESSION_ID, sessionId) .param("processDefinitionId", "40")) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,519
[Feature][JsonSplit-api]executors interface
from #5498 Change the request parameter processDefinitionId to processDefinitionCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5519
https://github.com/apache/dolphinscheduler/pull/5863
8f0c400ee094e9f93fd74e9d09f6258903f56d91
c5bc4fc48e67d3e8e1b40157403c8c4017ffae57
"2021-05-18T14:03:18Z"
java
"2021-07-20T09:22:10Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import org.apache.dolphinscheduler.api.enums.ExecuteType; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.impl.ExecutorServiceImpl; import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.RunMode; import org.apache.dolphinscheduler.common.model.Server; import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.dao.entity.Tenant; import 
org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.service.process.ProcessService; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; /** * executor service 2 test */ @RunWith(MockitoJUnitRunner.Silent.class) public class ExecutorService2Test { @InjectMocks private ExecutorServiceImpl executorService; @Mock private ProcessService processService; @Mock private ProcessDefinitionMapper processDefinitionMapper; @Mock private ProjectMapper projectMapper; @Mock private ProjectServiceImpl projectService; @Mock private MonitorService monitorService; private int processDefinitionId = 1; private int processInstanceId = 1; private int tenantId = 1; private int userId = 1; private ProcessDefinition processDefinition = new ProcessDefinition(); private ProcessInstance processInstance = new ProcessInstance(); private User loginUser = new User(); private String projectName = "projectName"; private Project project = new Project(); private String cronTime; @Before public void init() { // user loginUser.setId(userId); // processDefinition processDefinition.setId(processDefinitionId); processDefinition.setReleaseState(ReleaseState.ONLINE); processDefinition.setTenantId(tenantId); processDefinition.setUserId(userId); processDefinition.setVersion(1); processDefinition.setCode(1L); // processInstance processInstance.setId(processInstanceId); processInstance.setState(ExecutionStatus.FAILURE); processInstance.setExecutorId(userId); processInstance.setTenantId(tenantId); 
processInstance.setProcessDefinitionVersion(1); processInstance.setProcessDefinitionCode(1L); // project project.setName(projectName); // cronRangeTime cronTime = "2020-01-01 00:00:00,2020-01-31 23:00:00"; // mock Mockito.when(projectMapper.queryByName(projectName)).thenReturn(project); Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(checkProjectAndAuth()); Mockito.when(processDefinitionMapper.selectById(processDefinitionId)).thenReturn(processDefinition); Mockito.when(processService.getTenantForProcess(tenantId, userId)).thenReturn(new Tenant()); Mockito.when(processService.createCommand(any(Command.class))).thenReturn(1); Mockito.when(monitorService.getServerListFromRegistry(true)).thenReturn(getMasterServersList()); Mockito.when(processService.findProcessInstanceDetailById(processInstanceId)).thenReturn(processInstance); Mockito.when(processService.findProcessDefinition(1L, 1)).thenReturn(processDefinition); } /** * not complement */ @Test public void testNoComplement() { Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName, processDefinitionId, cronTime, CommandType.START_PROCESS, null, null, null, null, 0, RunMode.RUN_MODE_SERIAL, Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110, null); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); verify(processService, times(1)).createCommand(any(Command.class)); } /** * not complement */ @Test public void testComplementWithStartNodeList() { Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName, processDefinitionId, cronTime, CommandType.START_PROCESS, null, "n1,n2", null, null, 0, RunMode.RUN_MODE_SERIAL, Priority.LOW, 
Constants.DEFAULT_WORKER_GROUP, 110, null); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); verify(processService, times(1)).createCommand(any(Command.class)); } /** * date error */ @Test public void testDateError() { Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName, processDefinitionId, "2020-01-31 23:00:00,2020-01-01 00:00:00", CommandType.COMPLEMENT_DATA, null, null, null, null, 0, RunMode.RUN_MODE_SERIAL, Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110, null); Assert.assertEquals(Status.START_PROCESS_INSTANCE_ERROR, result.get(Constants.STATUS)); verify(processService, times(0)).createCommand(any(Command.class)); } /** * serial */ @Test public void testSerial() { Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName, processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA, null, null, null, null, 0, RunMode.RUN_MODE_SERIAL, Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110, null); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); verify(processService, times(1)).createCommand(any(Command.class)); } /** * without schedule */ @Test public void testParallelWithOutSchedule() { Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName, processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA, null, null, null, null, 0, RunMode.RUN_MODE_PARALLEL, Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110, null); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); verify(processService, times(31)).createCommand(any(Command.class)); } 
/** * with schedule */ @Test public void testParallelWithSchedule() { Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(oneSchedulerList()); Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName, processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA, null, null, null, null, 0, RunMode.RUN_MODE_PARALLEL, Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110, null); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); verify(processService, times(15)).createCommand(any(Command.class)); } @Test public void testNoMsterServers() { Mockito.when(monitorService.getServerListFromRegistry(true)).thenReturn(new ArrayList<>()); Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName, processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA, null, null, null, null, 0, RunMode.RUN_MODE_PARALLEL, Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110, null); Assert.assertEquals(result.get(Constants.STATUS), Status.MASTER_NOT_EXISTS); } @Test public void testExecuteRepeatRunning() { Mockito.when(processService.verifyIsNeedCreateCommand(any(Command.class))).thenReturn(true); Map<String, Object> result = executorService.execute(loginUser, projectName, processInstanceId, ExecuteType.REPEAT_RUNNING); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } private List<Server> getMasterServersList() { List<Server> masterServerList = new ArrayList<>(); Server masterServer1 = new Server(); masterServer1.setId(1); masterServer1.setHost("192.168.220.188"); masterServer1.setPort(1121); masterServerList.add(masterServer1); Server masterServer2 = new Server(); masterServer2.setId(2); masterServer2.setHost("192.168.220.189"); masterServer2.setPort(1122); masterServerList.add(masterServer2); return masterServerList; } private List zeroSchedulerList() { return Collections.EMPTY_LIST; } private List<Schedule> oneSchedulerList() { List<Schedule> 
schedulerList = new LinkedList<>(); Schedule schedule = new Schedule(); schedule.setCrontab("0 0 0 1/2 * ?"); schedulerList.add(schedule); return schedulerList; } private Map<String, Object> checkProjectAndAuth() { Map<String, Object> result = new HashMap<>(); result.put(Constants.STATUS, Status.SUCCESS); return result; } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,519
[Feature][JsonSplit-api]executors interface
from #5498 Change the request parameter processDefinitionId to processDefinitionCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5519
https://github.com/apache/dolphinscheduler/pull/5863
8f0c400ee094e9f93fd74e9d09f6258903f56d91
c5bc4fc48e67d3e8e1b40157403c8c4017ffae57
"2021-05-18T14:03:18Z"
java
"2021-07-20T09:22:10Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.controller.AbstractControllerTest; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.impl.ExecutorServiceImpl; import org.apache.dolphinscheduler.common.Constants; import java.text.MessageFormat; import java.util.HashMap; import java.util.Map; import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; /** * executor service test */ public class ExecutorServiceTest extends AbstractControllerTest { private static final Logger logger = LoggerFactory.getLogger(ExecutorServiceTest.class); @Autowired private ExecutorServiceImpl executorService; @Ignore @Test public void startCheckByProcessDefinedId() { Map<String, Object> map = executorService.startCheckByProcessDefinedId(1234); Assert.assertNull(map); } @Test public void putMsgWithParamsTest() { Map<String, Object> map = new HashMap<>(); putMsgWithParams(map, Status.PROJECT_ALREADY_EXISTS); logger.info(map.toString()); } void putMsgWithParams(Map<String, Object> result, Status status, 
Object... statusParams) { result.put(Constants.STATUS, status); if (statusParams != null && statusParams.length > 0) { result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams)); } else { result.put(Constants.MSG, status.getMsg()); } } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,519
[Feature][JsonSplit-api]executors interface
from #5498 Change the request parameter processDefinitionId to processDefinitionCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5519
https://github.com/apache/dolphinscheduler/pull/5863
8f0c400ee094e9f93fd74e9d09f6258903f56d91
c5bc4fc48e67d3e8e1b40157403c8c4017ffae57
"2021-05-18T14:03:18Z"
java
"2021-07-20T09:22:10Z"
dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <div class="list-model" style="position: relative;"> <div class="table-box"> <el-table :data="list" size="mini" style="width: 100%" @selection-change="_arrDelChange"> <el-table-column type="selection" width="50" :selectable="selectable"></el-table-column> <el-table-column prop="id" :label="$t('#')" width="50"></el-table-column> <el-table-column :label="$t('Process Name')" min-width="200"> <template slot-scope="scope"> <el-popover trigger="hover" placement="top"> <p>{{ scope.row.name }}</p> <div slot="reference" class="name-wrapper"> <router-link :to="{ path: `/projects/${projectId}/definition/list/${scope.row.id}` }" tag="a" class="links"> <span class="ellipsis">{{scope.row.name}}</span> </router-link> </div> </el-popover> </template> </el-table-column> <el-table-column :label="$t('State')"> <template slot-scope="scope"> {{_rtPublishStatus(scope.row.releaseState)}} </template> </el-table-column> <el-table-column :label="$t('Create Time')" width="135"> <template slot-scope="scope"> <span>{{scope.row.createTime | formatDate}}</span> </template> </el-table-column> <el-table-column :label="$t('Update Time')" width="135"> <template slot-scope="scope"> <span>{{scope.row.updateTime | 
formatDate}}</span> </template> </el-table-column> <el-table-column :label="$t('Description')"> <template slot-scope="scope"> <span>{{scope.row.description | filterNull}}</span> </template> </el-table-column> <el-table-column prop="modifyBy" :label="$t('Modify User')"></el-table-column> <el-table-column :label="$t('Timing state')"> <template slot-scope="scope"> <span v-if="scope.row.scheduleReleaseState === 'OFFLINE'" class="time_offline">{{$t('offline')}}</span> <span v-if="scope.row.scheduleReleaseState === 'ONLINE'" class="time_online">{{$t('online')}}</span> <span v-if="!scope.row.scheduleReleaseState">-</span> </template> </el-table-column> <el-table-column :label="$t('Operation')" width="335" fixed="right"> <template slot-scope="scope"> <el-tooltip :content="$t('Edit')" placement="top" :enterable="false"> <span><el-button type="primary" size="mini" icon="el-icon-edit-outline" :disabled="scope.row.releaseState === 'ONLINE'" @click="_edit(scope.row)" circle></el-button></span> </el-tooltip> <el-tooltip :content="$t('Start')" placement="top" :enterable="false"> <span><el-button type="success" size="mini" :disabled="scope.row.releaseState !== 'ONLINE'" icon="el-icon-video-play" @click="_start(scope.row)" circle></el-button></span> </el-tooltip> <el-tooltip :content="$t('Timing')" placement="top" :enterable="false"> <span><el-button type="primary" size="mini" icon="el-icon-time" :disabled="scope.row.releaseState !== 'ONLINE' || scope.row.scheduleReleaseState !== null" @click="_timing(scope.row)" circle></el-button></span> </el-tooltip> <el-tooltip :content="$t('online')" placement="top" :enterable="false"> <span><el-button type="warning" size="mini" v-if="scope.row.releaseState === 'OFFLINE'" icon="el-icon-upload2" @click="_poponline(scope.row)" circle></el-button></span> </el-tooltip> <el-tooltip :content="$t('offline')" placement="top" :enterable="false"> <span><el-button type="danger" size="mini" icon="el-icon-download" v-if="scope.row.releaseState === 
'ONLINE'" @click="_downline(scope.row)" circle></el-button></span> </el-tooltip> <el-tooltip :content="$t('Copy Workflow')" placement="top" :enterable="false"> <span><el-button type="primary" size="mini" :disabled="scope.row.releaseState === 'ONLINE'" icon="el-icon-document-copy" @click="_copyProcess(scope.row)" circle></el-button></span> </el-tooltip> <el-tooltip :content="$t('Cron Manage')" placement="top" :enterable="false"> <span><el-button type="primary" size="mini" icon="el-icon-date" :disabled="scope.row.releaseState !== 'ONLINE'" @click="_timingManage(scope.row)" circle></el-button></span> </el-tooltip> <el-tooltip :content="$t('Delete')" placement="top" :enterable="false"> <el-popconfirm :confirmButtonText="$t('Confirm')" :cancelButtonText="$t('Cancel')" icon="el-icon-info" iconColor="red" :title="$t('Delete?')" @onConfirm="_delete(scope.row,scope.row.id)" > <el-button type="danger" size="mini" icon="el-icon-delete" :disabled="scope.row.releaseState === 'ONLINE'" circle slot="reference"></el-button> </el-popconfirm> </el-tooltip> <el-tooltip :content="$t('TreeView')" placement="top" :enterable="false"> <span><el-button type="primary" size="mini" icon="el-icon-s-data" @click="_treeView(scope.row)" circle></el-button></span> </el-tooltip> <el-tooltip :content="$t('Export')" placement="top" :enterable="false"> <span><el-button type="primary" size="mini" icon="el-icon-s-unfold" @click="_export(scope.row)" circle></el-button></span> </el-tooltip> <el-tooltip :content="$t('Version Info')" placement="top" :enterable="false"> <span><el-button type="primary" size="mini" icon="el-icon-info" @click="_version(scope.row)" circle></el-button></span> </el-tooltip> </template> </el-table-column> </el-table> </div> <el-tooltip :content="$t('Delete')" placement="top"> <el-popconfirm :confirmButtonText="$t('Confirm')" :cancelButtonText="$t('Cancel')" :title="$t('Delete?')" @onConfirm="_delete({},-1)" > <el-button style="position: absolute; bottom: -48px; left: 19px;" 
type="primary" size="mini" :disabled="!strSelectIds" slot="reference">{{$t('Delete')}}</el-button> </el-popconfirm> </el-tooltip> <el-button type="primary" size="mini" :disabled="!strSelectIds" style="position: absolute; bottom: -48px; left: 80px;" @click="_batchExport(item)" >{{$t('Export')}}</el-button> <span><el-button type="primary" size="mini" :disabled="!strSelectIds" style="position: absolute; bottom: -48px; left: 140px;" @click="_batchCopy(item)" >{{$t('Batch copy')}}</el-button></span> <el-button type="primary" size="mini" :disabled="!strSelectIds" style="position: absolute; bottom: -48px; left: 225px;" @click="_batchMove(item)" >{{$t('Batch move')}}</el-button> <el-drawer :visible.sync="drawer" size="" :with-header="false"> <m-versions :versionData = versionData @mVersionSwitchProcessDefinitionVersion="mVersionSwitchProcessDefinitionVersion" @mVersionGetProcessDefinitionVersionsPage="mVersionGetProcessDefinitionVersionsPage" @mVersionDeleteProcessDefinitionVersion="mVersionDeleteProcessDefinitionVersion" @closeVersion="closeVersion"></m-versions> </el-drawer> <el-dialog :title="$t('Please set the parameters before starting')" v-if="startDialog" :visible.sync="startDialog" width="auto"> <m-start :startData= "startData" @onUpdateStart="onUpdateStart" @closeStart="closeStart"></m-start> </el-dialog> <el-dialog :title="$t('Set parameters before timing')" :visible.sync="timingDialog" width="auto"> <m-timing :timingData="timingData" @onUpdateTiming="onUpdateTiming" @closeTiming="closeTiming"></m-timing> </el-dialog> <el-dialog :title="$t('Info')" :visible.sync="relatedItemsDialog" width="auto"> <m-related-items :tmp="tmp" @onBatchCopy="onBatchCopy" @onBatchMove="onBatchMove" @closeRelatedItems="closeRelatedItems"></m-related-items> </el-dialog> </div> </template> <script> import _ from 'lodash' import mStart from './start' import mTiming from './timing' import mRelatedItems from './relatedItems' import { mapActions, mapState } from 'vuex' import { publishStatus 
} from '@/conf/home/pages/dag/_source/config' import mVersions from './versions' export default { name: 'definition-list', data () { return { list: [], strSelectIds: '', checkAll: false, drawer: false, versionData: { processDefinition: {}, processDefinitionVersions: [], total: null, pageNo: null, pageSize: null }, startDialog: false, startData: {}, timingDialog: false, timingData: { item: {}, type: '' }, relatedItemsDialog: false, tmp: false } }, props: { processList: Array, pageNo: Number, pageSize: Number }, methods: { ...mapActions('dag', ['editProcessState', 'getStartCheck', 'deleteDefinition', 'batchDeleteDefinition', 'exportDefinition', 'getProcessDefinitionVersionsPage', 'copyProcess', 'switchProcessDefinitionVersion', 'deleteProcessDefinitionVersion', 'moveProcess']), ...mapActions('security', ['getWorkerGroupsAll']), selectable (row, index) { if (row.releaseState === 'ONLINE') { return false } else { return true } }, _rtPublishStatus (code) { return _.filter(publishStatus, v => v.code === code)[0].desc }, _treeView (item) { this.$router.push({ path: `/projects/${this.projectId}/definition/tree/${item.id}` }) }, /** * Start */ _start (item) { this.getWorkerGroupsAll() this.getStartCheck({ processDefinitionId: item.id }).then(res => { this.startData = item this.startDialog = true }).catch(e => { this.$message.error(e.msg || '') }) }, onUpdateStart () { this._onUpdate() this.startDialog = false }, closeStart () { this.startDialog = false }, /** * timing */ _timing (item) { this.timingData.item = item this.timingData.type = 'timing' this.timingDialog = true }, onUpdateTiming () { this._onUpdate() this.timingDialog = false }, closeTiming () { this.timingDialog = false }, /** * Timing manage */ _timingManage (item) { this.$router.push({ path: `/projects/${this.projectId}/definition/list/timing/${item.code}` }) }, /** * delete */ _delete (item, i) { // remove tow++ if (i < 0) { this._batchDelete() return } // remove one this.deleteDefinition({ 
processDefinitionId: item.id }).then(res => { this._onUpdate() this.$message.success(res.msg) }).catch(e => { this.$message.error(e.msg || '') }) }, /** * edit */ _edit (item) { this.$router.push({ path: `/projects/${this.projectId}/definition/list/${item.id}` }) }, /** * Offline */ _downline (item) { this._upProcessState({ processId: item.id, releaseState: 'OFFLINE' }) }, /** * online */ _poponline (item) { this._upProcessState({ processId: item.id, releaseState: 'ONLINE' }) }, /** * copy */ _copyProcess (item) { this.copyProcess({ processDefinitionIds: item.id, targetProjectId: item.projectId }).then(res => { this.strSelectIds = '' this.$message.success(res.msg) // $('body').find('.tooltip.fade.top.in').remove() this._onUpdate() }).catch(e => { this.$message.error(e.msg || '') }) }, /** * move */ _moveProcess (item) { this.moveProcess({ processDefinitionIds: item.id, targetProjectId: item.projectId }).then(res => { this.strSelectIds = '' this.$message.success(res.msg) $('body').find('.tooltip.fade.top.in').remove() this._onUpdate() }).catch(e => { this.$message.error(e.msg || '') }) }, _export (item) { this.exportDefinition({ processDefinitionIds: item.id, fileName: item.name }).catch(e => { this.$message.error(e.msg || '') }) }, /** * switch version in process definition version list * * @param version the version user want to change * @param processDefinitionId the process definition id * @param fromThis fromThis */ mVersionSwitchProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) { this.switchProcessDefinitionVersion({ version: version, processDefinitionId: processDefinitionId }).then(res => { this.$message.success($t('Switch Version Successfully')) this.$router.push({ path: `/projects/${this.projectId}/definition/list/${processDefinitionId}` }) }).catch(e => { this.$message.error(e.msg || '') }) }, /** * Paging event of process definition versions * * @param pageNo page number * @param pageSize page size * @param processDefinitionId the 
process definition id of page version * @param fromThis fromThis */ mVersionGetProcessDefinitionVersionsPage ({ pageNo, pageSize, processDefinitionCode, fromThis }) { this.getProcessDefinitionVersionsPage({ pageNo: pageNo, pageSize: pageSize, processDefinitionCode: processDefinitionCode }).then(res => { this.versionData.processDefinitionVersions = res.data.lists this.versionData.total = res.data.totalCount this.versionData.pageSize = res.data.pageSize this.versionData.pageNo = res.data.currentPage }).catch(e => { this.$message.error(e.msg || '') }) }, /** * delete one version of process definition * * @param version the version need to delete * @param processDefinitionId the process definition id user want to delete * @param fromThis fromThis */ mVersionDeleteProcessDefinitionVersion ({ version, processDefinitionId, processDefinitionCode, fromThis }) { this.deleteProcessDefinitionVersion({ version: version, processDefinitionId: processDefinitionId }).then(res => { this.$message.success(res.msg || '') this.mVersionGetProcessDefinitionVersionsPage({ pageNo: 1, pageSize: 10, processDefinitionCode: processDefinitionCode, fromThis: fromThis }) }).catch(e => { this.$message.error(e.msg || '') }) }, _version (item) { this.getProcessDefinitionVersionsPage({ pageNo: 1, pageSize: 10, processDefinitionCode: item.code }).then(res => { let processDefinitionVersions = res.data.lists let total = res.data.totalCount let pageSize = res.data.pageSize let pageNo = res.data.currentPage this.versionData.processDefinition = item this.versionData.processDefinitionVersions = processDefinitionVersions this.versionData.total = total this.versionData.pageNo = pageNo this.versionData.pageSize = pageSize this.drawer = true }).catch(e => { this.$message.error(e.msg || '') }) }, closeVersion () { this.drawer = false }, _batchExport () { this.exportDefinition({ processDefinitionIds: this.strSelectIds, fileName: 'process_' + new Date().getTime() }).then(res => { this._onUpdate() this.checkAll = 
false this.strSelectIds = '' }).catch(e => { this.strSelectIds = '' this.checkAll = false this.$message.error(e.msg) }) }, /** * Batch Copy */ _batchCopy () { this.relatedItemsDialog = true this.tmp = false }, onBatchCopy (item) { this._copyProcess({ id: this.strSelectIds, projectId: item }) this.relatedItemsDialog = false }, closeRelatedItems () { this.relatedItemsDialog = false }, /** * _batchMove */ _batchMove () { this.tmp = true this.relatedItemsDialog = true }, onBatchMove (item) { this._moveProcess({ id: this.strSelectIds, projectId: item }) this.relatedItemsDialog = false }, /** * Edit state */ _upProcessState (o) { this.editProcessState(o).then(res => { this.$message.success(res.msg) $('body').find('.tooltip.fade.top.in').remove() this._onUpdate() }).catch(e => { this.$message.error(e.msg || '') }) }, _onUpdate () { this.$emit('on-update') }, /** * the array that to be delete */ _arrDelChange (v) { let arr = [] arr = _.map(v, 'id') this.strSelectIds = _.join(arr, ',') }, /** * batch delete */ _batchDelete () { this.batchDeleteDefinition({ processDefinitionIds: this.strSelectIds }).then(res => { this._onUpdate() this.checkAll = false this.strSelectIds = '' this.$message.success(res.msg) }).catch(e => { this.strSelectIds = '' this.checkAll = false this.$message.error(e.msg || '') }) } }, watch: { processList: { handler (a) { this.checkAll = false this.list = [] setTimeout(() => { this.list = _.cloneDeep(a) }) }, immediate: true, deep: true }, pageNo () { this.strSelectIds = '' } }, created () { }, mounted () { }, computed: { ...mapState('dag', ['projectId']) }, components: { mVersions, mStart, mTiming, mRelatedItems } } </script> <style lang="scss" rel="stylesheet/scss"> .time_online { background-color: #5cb85c; color: #fff; padding: 3px; } .time_offline { background-color: #ffc107; color: #fff; padding: 3px; } </style>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,519
[Feature][JsonSplit-api]executors interface
from #5498 Change the request parameter processDefinitionId to processDefinitionCode,including the front end and controller interface
https://github.com/apache/dolphinscheduler/issues/5519
https://github.com/apache/dolphinscheduler/pull/5863
8f0c400ee094e9f93fd74e9d09f6258903f56d91
c5bc4fc48e67d3e8e1b40157403c8c4017ffae57
"2021-05-18T14:03:18Z"
java
"2021-07-20T09:22:10Z"
dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import _ from 'lodash' import io from '@/module/io' import { tasksState } from '@/conf/home/pages/dag/_source/config' // delete 'definitionList' from tasks const deleteDefinitionList = (tasks) => { const newTasks = [] tasks.forEach(item => { const newItem = Object.assign({}, item) if (newItem.dependence && newItem.dependence.dependTaskList) { newItem.dependence.dependTaskList.forEach(dependTaskItem => { if (dependTaskItem.dependItemList) { dependTaskItem.dependItemList.forEach(dependItem => { Reflect.deleteProperty(dependItem, 'definitionList') }) } }) } newTasks.push(newItem) }) return newTasks } export default { /** * Task status acquisition */ getTaskState ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/task-list-by-process-id`, { processInstanceId: payload }, res => { const arr = _.map(res.data.taskList, v => { return _.cloneDeep(_.assign(tasksState[v.state], { name: v.name, stateId: v.id, dependentResult: v.dependentResult })) }) resolve({ list: arr, processInstanceState: res.data.processInstanceState, taskList: res.data.taskList }) }).catch(e => { reject(e) }) }) }, /** * Update process definition status */ editProcessState ({ 
state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/process/release`, { processId: payload.processId, releaseState: payload.releaseState }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * get process definition versions pagination info */ getProcessDefinitionVersionsPage ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/versions`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * switch process definition version */ switchProcessDefinitionVersion ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/version/switch`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * delete process definition version */ deleteProcessDefinitionVersion ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/version/delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Update process instance status */ editExecutorsState ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/executors/execute`, { processInstanceId: payload.processInstanceId, executeType: payload.executeType }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Verify that the DGA map name exists */ verifDAGName ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/verify-name`, { name: payload }, res => { state.name = payload resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get process definition DAG diagram details */ getProcessDetails ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/select-by-id`, { processId: payload }, res => { // process definition code state.code = res.data.code // version state.version = res.data.version // name 
state.name = res.data.name // description state.description = res.data.description // connects state.connects = JSON.parse(res.data.connects) // locations state.locations = JSON.parse(res.data.locations) // Process definition const processDefinitionJson = JSON.parse(res.data.processDefinitionJson) // tasks info state.tasks = processDefinitionJson.tasks // tasks cache state.cacheTasks = {} processDefinitionJson.tasks.forEach(v => { state.cacheTasks[v.id] = v }) // global params state.globalParams = processDefinitionJson.globalParams // timeout state.timeout = processDefinitionJson.timeout state.tenantId = processDefinitionJson.tenantId resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get process definition DAG diagram details */ copyProcess ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/process/copy`, { processDefinitionIds: payload.processDefinitionIds, targetProjectId: payload.targetProjectId }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get process definition DAG diagram details */ moveProcess ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/process/move`, { processDefinitionIds: payload.processDefinitionIds, targetProjectId: payload.targetProjectId }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get all the items created by the logged in user */ getAllItems ({ state }, payload) { return new Promise((resolve, reject) => { io.get('projects/created-and-authorized-project', {}, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get the process instance DAG diagram details */ getInstancedetail ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/select-by-id`, { processInstanceId: payload }, res => { // code state.code = res.data.processDefinitionCode // version state.version = res.data.processDefinitionVersion // name state.name = 
res.data.name // desc state.description = res.data.description // connects state.connects = JSON.parse(res.data.connects) // locations state.locations = JSON.parse(res.data.locations) // process instance const processInstanceJson = JSON.parse(res.data.processInstanceJson) // tasks info state.tasks = processInstanceJson.tasks // tasks cache state.cacheTasks = {} processInstanceJson.tasks.forEach(v => { state.cacheTasks[v.id] = v }) // global params state.globalParams = processInstanceJson.globalParams // timeout state.timeout = processInstanceJson.timeout state.tenantId = processInstanceJson.tenantId // startup parameters state.startup = _.assign(state.startup, _.pick(res.data, ['commandType', 'failureStrategy', 'processInstancePriority', 'workerGroup', 'warningType', 'warningGroupId', 'receivers', 'receiversCc'])) state.startup.commandParam = JSON.parse(res.data.commandParam) resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Create process definition */ saveDAGchart ({ state }, payload) { return new Promise((resolve, reject) => { const data = { globalParams: state.globalParams, tasks: deleteDefinitionList(state.tasks), tenantId: state.tenantId, timeout: state.timeout } io.post(`projects/${state.projectName}/process/save`, { processDefinitionJson: JSON.stringify(data), name: _.trim(state.name), description: _.trim(state.description), locations: JSON.stringify(state.locations), connects: JSON.stringify(state.connects) }, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Process definition update */ updateDefinition ({ state }, payload) { return new Promise((resolve, reject) => { const data = { globalParams: state.globalParams, tasks: deleteDefinitionList(state.tasks), tenantId: state.tenantId, timeout: state.timeout } io.post(`projects/${state.projectName}/process/update`, { processDefinitionJson: JSON.stringify(data), locations: JSON.stringify(state.locations), connects: JSON.stringify(state.connects), name: _.trim(state.name), description: 
_.trim(state.description), id: payload, releaseState: state.releaseState }, res => { resolve(res) state.isEditDag = false }).catch(e => { reject(e) }) }) }, /** * Process instance update */ updateInstance ({ state }, payload) { return new Promise((resolve, reject) => { const data = { globalParams: state.globalParams, tasks: state.tasks, tenantId: state.tenantId, timeout: state.timeout } io.post(`projects/${state.projectName}/instance/update`, { processInstanceJson: JSON.stringify(data), locations: JSON.stringify(state.locations), connects: JSON.stringify(state.connects), processInstanceId: payload, syncDefine: state.syncDefine }, res => { resolve(res) state.isEditDag = false }).catch(e => { reject(e) }) }) }, /** * Get a list of process definitions (sub-workflow usage is not paged) */ getProcessList ({ state }, payload) { return new Promise((resolve, reject) => { if (state.processListS.length) { resolve() return } io.get(`projects/${state.projectName}/process/list`, payload, res => { state.processListS = res.data resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get a list of process definitions (list page usage with pagination) */ getProcessListP ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/list-paging`, payload, res => { resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get a list of project */ getProjectList ({ state }, payload) { return new Promise((resolve, reject) => { if (state.projectListS.length) { resolve() return } io.get('projects/query-project-list', payload, res => { state.projectListS = res.data resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get a list of process definitions by project id */ getProcessByProjectId ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/queryProcessDefinitionAllByProjectId`, payload, res => { resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * get 
datasource */ getDatasourceList ({ state }, payload) { return new Promise((resolve, reject) => { io.get('datasources/list', { type: payload }, res => { resolve(res) }).catch(res => { reject(res) }) }) }, /** * get resources */ getResourcesList ({ state }) { return new Promise((resolve, reject) => { if (state.resourcesListS.length) { resolve() return } io.get('resources/list', { type: 'FILE' }, res => { state.resourcesListS = res.data resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * get jar */ getResourcesListJar ({ state }) { return new Promise((resolve, reject) => { if (state.resourcesListJar.length) { resolve() return } io.get('resources/list/jar', { type: 'FILE' }, res => { state.resourcesListJar = res.data resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get process instance */ getProcessInstance ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/list-paging`, payload, res => { state.instanceListS = res.data.totalList resolve(res.data) }).catch(res => { reject(res) }) }) }, /** * Get alarm list */ getNotifyGroupList ({ state }, payload) { return new Promise((resolve, reject) => { io.get('alert-group/list', res => { state.notifyGroupListS = _.map(res.data, v => { return { id: v.id, code: v.groupName, disabled: false } }) resolve(_.cloneDeep(state.notifyGroupListS)) }).catch(res => { reject(res) }) }) }, /** * Process definition startup interface */ processStart ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/executors/start-process-instance`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * View log */ getLog ({ state }, payload) { return new Promise((resolve, reject) => { io.get('log/detail', payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get the process instance id according to the process definition id * @param taskId */ getSubProcessId ({ state }, payload) { return new 
Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/select-sub-process`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Called before the process definition starts */ getStartCheck ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/executors/start-check`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Create timing */ createSchedule ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectCode}/schedule/create`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Preview timing */ previewSchedule ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectCode}/schedule/preview`, payload, res => { resolve(res.data) // alert(res.data) }).catch(e => { reject(e) }) }) }, /** * Timing list paging */ getScheduleList ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectCode}/schedule/list-paging`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Timing online */ scheduleOffline ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectCode}/schedule/offline`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Timed offline */ scheduleOnline ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectCode}/schedule/online`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Edit timing */ updateSchedule ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectCode}/schedule/update`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Delete process instance */ deleteInstance ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/delete`, payload, 
res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Batch delete process instance */ batchDeleteInstance ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/batch-delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Delete definition */ deleteDefinition ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Batch delete definition */ batchDeleteDefinition ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/batch-delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * export definition */ exportDefinition ({ state }, payload) { const downloadBlob = (data, fileNameS = 'json') => { if (!data) { return } const blob = new Blob([data]) const fileName = `${fileNameS}.json` if ('download' in document.createElement('a')) { // 不是IE浏览器 const url = window.URL.createObjectURL(blob) const link = document.createElement('a') link.style.display = 'none' link.href = url link.setAttribute('download', fileName) document.body.appendChild(link) link.click() document.body.removeChild(link) // 下载完成移除元素 window.URL.revokeObjectURL(url) // 释放掉blob对象 } else { // IE 10+ window.navigator.msSaveBlob(blob, fileName) } } io.get(`projects/${state.projectName}/process/export`, { processDefinitionIds: payload.processDefinitionIds }, res => { downloadBlob(res, payload.fileName) }, e => { }, { responseType: 'blob' }) }, /** * Process instance get variable */ getViewvariables ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/view-variables`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Get udfs function based on data source */ getUdfList ({ state }, payload) { return new Promise((resolve, reject) => { 
io.get('resources/udf-func/list', payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Query task instance list */ getTaskInstanceList ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/task-instance/list-paging`, payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * Force fail/kill/need_fault_tolerance task success */ forceTaskSuccess ({ state }, payload) { return new Promise((resolve, reject) => { io.post(`projects/${state.projectName}/task-instance/force-success`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, /** * Query task record list */ getTaskRecordList ({ state }, payload) { return new Promise((resolve, reject) => { io.get('projects/task-record/list-paging', payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * Query history task record list */ getHistoryTaskRecordList ({ state }, payload) { return new Promise((resolve, reject) => { io.get('projects/task-record/history-list-paging', payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * tree chart */ getViewTree ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/view-tree`, payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * gantt chart */ getViewGantt ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/instance/view-gantt`, payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, /** * Query task node list */ getProcessTasksList ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/gen-task-list`, payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) }, getTaskListDefIdAll ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectName}/process/get-task-list`, payload, res => { resolve(res.data) 
}).catch(e => { reject(e) }) }) }, /** * remove timing */ deleteTiming ({ state }, payload) { return new Promise((resolve, reject) => { io.get(`projects/${state.projectCode}/schedule/delete`, payload, res => { resolve(res) }).catch(e => { reject(e) }) }) }, getResourceId ({ state }, payload) { return new Promise((resolve, reject) => { io.get('resources/queryResource', payload, res => { resolve(res.data) }).catch(e => { reject(e) }) }) } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,825
[Bug][WEB] the resource tree in the process definition of latest dev branch can't display correctly.
A clear and concise description of what the bug is. When we add multiple resources to a existed process definition and reopen the edit page, the resource list would't show as expected and throws an undefined exception. ![image](https://user-images.githubusercontent.com/52202080/125821430-b869c06d-dcac-4033-906a-9bf48377c766.png) **Which version of Dolphin Scheduler:** latest dev
https://github.com/apache/dolphinscheduler/issues/5825
https://github.com/apache/dolphinscheduler/pull/5826
5e343d3fd21ceb2bf141e7b70524b5b3eb5fb87b
6964c090c7a1cb3d1d69f5fe70ca3025df9b4be3
"2021-07-15T16:27:00Z"
java
"2021-07-20T12:48:58Z"
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/resourceTree.js
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,825
[Bug][WEB] the resource tree in the process definition of latest dev branch can't display correctly.
A clear and concise description of what the bug is. When we add multiple resources to a existed process definition and reopen the edit page, the resource list would't show as expected and throws an undefined exception. ![image](https://user-images.githubusercontent.com/52202080/125821430-b869c06d-dcac-4033-906a-9bf48377c766.png) **Which version of Dolphin Scheduler:** latest dev
https://github.com/apache/dolphinscheduler/issues/5825
https://github.com/apache/dolphinscheduler/pull/5826
5e343d3fd21ceb2bf141e7b70524b5b3eb5fb87b
6964c090c7a1cb3d1d69f5fe70ca3025df9b4be3
"2021-07-15T16:27:00Z"
java
"2021-07-20T12:48:58Z"
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/flink.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <div class="flink-model"> <m-list-box> <div slot="text">{{$t('Program Type')}}</div> <div slot="content"> <el-select style="width: 130px;" size="small" v-model="programType" :disabled="isDetails"> <el-option v-for="city in programTypeList" :key="city.code" :value="city.code" :label="city.code"> </el-option> </el-select> </div> </m-list-box> <m-list-box v-if="programType !== 'PYTHON'"> <div slot="text">{{$t('Main Class')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="mainClass" :placeholder="$t('Please enter main class')"> </el-input> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Main Jar Package')}}</div> <div slot="content"> <treeselect v-model="mainJar" maxHeight="200" :options="mainJarLists" :disable-branch-nodes="true" :normalizer="normalizer" :disabled="isDetails" :placeholder="$t('Please enter main jar package')"> <div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div> </treeselect> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Deploy Mode')}}</div> <div slot="content"> <el-radio-group v-model="deployMode" size="small"> <el-radio :label="'cluster'" :disabled="isDetails"></el-radio> <el-radio 
:label="'local'" :disabled="isDetails"></el-radio> </el-radio-group> </div> </m-list-box> <m-list-box v-if="deployMode === 'cluster'"> <div slot="text">{{$t('Flink Version')}}</div> <div slot="content"> <el-select style="width: 100px;" size="small" v-model="flinkVersion" :disabled="isDetails"> <el-option v-for="version in flinkVersionList" :key="version.code" :value="version.code" :label="version.code"> </el-option> </el-select> </div> </m-list-box> <m-list-box v-if="deployMode === 'cluster'"> <div slot="text">{{$t('App Name')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="appName" :placeholder="$t('Please enter app name(optional)')"> </el-input> </div> </m-list-box> <m-list-4-box v-if="deployMode === 'cluster'"> <div slot="text">{{$t('JobManager Memory')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="jobManagerMemory" :placeholder="$t('Please enter JobManager memory')"> </el-input> </div> <div slot="text-2">{{$t('TaskManager Memory')}}</div> <div slot="content-2"> <el-input :disabled="isDetails" type="input" size="small" v-model="taskManagerMemory" :placeholder="$t('Please enter TaskManager memory')"> </el-input> </div> </m-list-4-box> <m-list-4-box v-if="deployMode === 'cluster'"> <div slot="text">{{$t('Slot Number')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="slot" :placeholder="$t('Please enter Slot number')"> </el-input> </div> <div slot="text-2" v-if="flinkVersion === '<1.10'">{{$t('TaskManager Number')}}</div> <div slot="content-2" v-if="flinkVersion === '<1.10'"> <el-input :disabled="isDetails" type="input" size="small" v-model="taskManager" :placeholder="$t('Please enter TaskManager number')"> </el-input> </div> </m-list-4-box> <m-list-4-box> <div slot="text">{{$t('Parallelism')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="parallelism" 
:placeholder="$t('Please enter Parallelism')"> </el-input> </div> </m-list-4-box> <m-list-box> <div slot="text">{{$t('Main Arguments')}}</div> <div slot="content"> <el-input :autosize="{minRows:2}" :disabled="isDetails" type="textarea" size="small" v-model="mainArgs" :placeholder="$t('Please enter main arguments')"> </el-input> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Option Parameters')}}</div> <div slot="content"> <el-input :disabled="isDetails" :autosize="{minRows:2}" type="textarea" size="small" v-model="others" :placeholder="$t('Please enter option parameters')"> </el-input> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Resources')}}</div> <div slot="content"> <treeselect v-model="resourceList" :multiple="true" maxHeight="200" :options="mainJarList" :normalizer="normalizer" :disabled="isDetails" :value-consists-of="valueConsistsOf" :placeholder="$t('Please select resources')"> <div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}<span class="copy-path" @mousedown="_copyPath($event, node)" >&nbsp; <em class="el-icon-copy-document" data-container="body" data-toggle="tooltip" :title="$t('Copy path')" ></em> &nbsp; </span></div> </treeselect> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Custom Parameters')}}</div> <div slot="content"> <m-local-params ref="refLocalParams" @on-local-params="_onLocalParams" :udp-list="localParams" :hide="false"> </m-local-params> </div> </m-list-box> </div> </template> <script> import _ from 'lodash' import i18n from '@/module/i18n' import mLocalParams from './_source/localParams' import mListBox from './_source/listBox' import mList4Box from './_source/list4Box' import Treeselect from '@riophae/vue-treeselect' import '@riophae/vue-treeselect/dist/vue-treeselect.css' import disabledState from '@/module/mixin/disabledState' import Clipboard from 'clipboard' export default { name: 'flink', data () { return { valueConsistsOf: 'LEAF_PRIORITY', // Main function class mainClass: '', // 
Master jar package mainJar: null, // Master jar package(List) mainJarLists: [], mainJarList: [], // Deployment method deployMode: 'cluster', // Resource(list) resourceList: [], // Cache ResourceList cacheResourceList: [], // Custom function localParams: [], // Slot number slot: 1, // Parallelism parallelism: 1, // TaskManager mumber taskManager: '2', // JobManager memory jobManagerMemory: '1G', // TaskManager memory taskManagerMemory: '2G', // Flink app name appName: '', // Main arguments mainArgs: '', // Option parameters others: '', // Program type programType: 'SCALA', // Program type(List) programTypeList: [{ code: 'JAVA' }, { code: 'SCALA' }, { code: 'PYTHON' }], flinkVersion: '<1.10', // Flink Versions(List) flinkVersionList: [{ code: '<1.10' }, { code: '>=1.10' }], normalizer (node) { return { label: node.name } }, allNoResources: [], noRes: [] } }, props: { backfillItem: Object }, mixins: [disabledState], methods: { _copyPath (e, node) { e.stopPropagation() let clipboard = new Clipboard('.copy-path', { text: function () { return node.raw.fullName } }) clipboard.on('success', handler => { this.$message.success(`${i18n.$t('Copy success')}`) // Free memory clipboard.destroy() }) clipboard.on('error', handler => { // Copy is not supported this.$message.warning(`${i18n.$t('The browser does not support automatic copying')}`) // Free memory clipboard.destroy() }) }, /** * getResourceId */ marjarId (name) { this.store.dispatch('dag/getResourceId', { type: 'FILE', fullName: '/' + name }).then(res => { this.mainJar = res.id }).catch(e => { this.$message.error(e.msg || '') }) }, /** * return localParams */ _onLocalParams (a) { this.localParams = a }, /** * return resourceList */ _onResourcesData (a) { this.resourceList = a }, /** * cache resourceList */ _onCacheResourcesData (a) { this.cacheResourceList = a }, /** * verification */ _verification () { if (this.programType !== 'PYTHON' && !this.mainClass) { this.$message.warning(`${i18n.$t('Please enter main class')}`) 
return false } if (!this.mainJar) { this.$message.warning(`${i18n.$t('Please enter main jar package')}`) return false } if (!this.jobManagerMemory) { this.$message.warning(`${i18n.$t('Please enter JobManager memory')}`) return false } if (!Number.isInteger(parseInt(this.jobManagerMemory))) { this.$message.warning(`${i18n.$t('Memory should be a positive integer')}`) return false } if (!this.taskManagerMemory) { this.$message.warning(`${i18n.$t('Please enter TaskManager memory')}`) return false } if (!Number.isInteger(parseInt(this.taskManagerMemory))) { this.$message.warning(`${i18n.$t('Memory should be a positive integer')}`) return false } if (!Number.isInteger(parseInt(this.slot))) { this.$message.warning(`${i18n.$t('Please enter Slot number')}`) return false } if (!Number.isInteger(parseInt(this.parallelism))) { this.$message.warning(`${i18n.$t('Please enter Parallelism')}`) return false } if (this.flinkVersion === '<1.10' && !Number.isInteger(parseInt(this.taskManager))) { this.$message.warning(`${i18n.$t('Please enter TaskManager number')}`) return false } // noRes if (this.noRes.length > 0) { this.$message.warning(`${i18n.$t('Please delete all non-existent resources')}`) return false } // localParams Subcomponent verification if (!this.$refs.refLocalParams._verifProp()) { return false } // storage this.$emit('on-params', { mainClass: this.mainClass, mainJar: { id: this.mainJar }, deployMode: this.deployMode, resourceList: _.map(this.resourceList, v => { return { id: v } }), localParams: this.localParams, flinkVersion: this.flinkVersion, slot: this.slot, parallelism: this.parallelism, taskManager: this.taskManager, jobManagerMemory: this.jobManagerMemory, taskManagerMemory: this.taskManagerMemory, appName: this.appName, mainArgs: this.mainArgs, others: this.others, programType: this.programType }) return true }, diGuiTree (item) { // Recursive convenience tree structure item.forEach(item => { item.children === '' || item.children === undefined || item.children 
=== null || item.children.length === 0 ? this.operationTree(item) : this.diGuiTree(item.children) }) }, operationTree (item) { if (item.dirctory) { item.isDisabled = true } delete item.children }, searchTree (element, id) { // 根据id查找节点 if (element.id === id) { return element } else if (element.children !== null) { let i let result = null for (i = 0; result === null && i < element.children.length; i++) { result = this.searchTree(element.children[i], id) } return result } return null }, dataProcess (backResource) { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { this.resourceList.forEach(v => { this.mainJarList.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return item.id }) Array.prototype.diff = function (a) { return this.filter(function (i) { return a.indexOf(i) < 0 }) } let diffSet = this.resourceList.diff(resourceIdArr) let optionsCmp = [] if (diffSet.length > 0) { diffSet.forEach(item => { backResource.forEach(item1 => { if (item === item1.id || item === item1.res) { optionsCmp.push(item1) } }) }) } let noResources = [{ id: -1, name: $t('Unauthorized or deleted resources'), fullName: '/' + $t('Unauthorized or deleted resources'), children: [] }] if (optionsCmp.length > 0) { this.allNoResources = optionsCmp optionsCmp = optionsCmp.map(item => { return { id: item.id, name: item.name, fullName: item.res } }) optionsCmp.forEach(item => { item.isNew = true }) noResources[0].children = optionsCmp this.mainJarList = this.mainJarList.concat(noResources) } } } }, watch: { // Listening type programType (type) { if (type === 'PYTHON') { this.mainClass = '' } }, // Watch the cacheParams cacheParams (val) { this.$emit('on-cache-params', val) }, resourceIdArr (arr) { let result = [] arr.forEach(item => { this.allNoResources.forEach(item1 => { if (item.id === item1.id) { // resultBool = true result.push(item1) } }) }) this.noRes = result } }, computed: 
{ resourceIdArr () { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { this.resourceList.forEach(v => { this.mainJarList.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return { id: item.id, name: item.name, res: item.fullName } }) } return resourceIdArr }, cacheParams () { return { mainClass: this.mainClass, mainJar: { id: this.mainJar }, deployMode: this.deployMode, resourceList: this.resourceIdArr, localParams: this.localParams, slot: this.slot, parallelism: this.parallelism, taskManager: this.taskManager, jobManagerMemory: this.jobManagerMemory, taskManagerMemory: this.taskManagerMemory, appName: this.appName, mainArgs: this.mainArgs, others: this.others, programType: this.programType } } }, created () { let item = this.store.state.dag.resourcesListS let items = this.store.state.dag.resourcesListJar this.diGuiTree(item) this.diGuiTree(items) this.mainJarList = item this.mainJarLists = items let o = this.backfillItem // Non-null objects represent backfill if (!_.isEmpty(o)) { this.mainClass = o.params.mainClass || '' if (o.params.mainJar.res) { this.marjarId(o.params.mainJar.res) } else if (o.params.mainJar.res === '') { this.mainJar = '' } else { this.mainJar = o.params.mainJar.id || '' } this.deployMode = o.params.deployMode || '' this.flinkVersion = o.params.flinkVersion || '<1.10' this.slot = o.params.slot || 1 this.parallelism = o.params.parallelism || 1 this.taskManager = o.params.taskManager || '2' this.jobManagerMemory = o.params.jobManagerMemory || '1G' this.taskManagerMemory = o.params.taskManagerMemory || '2G' this.appName = o.params.appName || '' this.mainArgs = o.params.mainArgs || '' this.others = o.params.others this.programType = o.params.programType || 'SCALA' // backfill resourceList let backResource = o.params.resourceList || [] let resourceList = o.params.resourceList || [] if (resourceList.length) { _.map(resourceList, v 
=> { if (!v.id) { this.store.dispatch('dag/getResourceId', { type: 'FILE', fullName: '/' + v.res }).then(res => { this.resourceList.push(res.id) this.dataProcess(backResource) }).catch(e => { this.resourceList.push(v.res) this.dataProcess(backResource) }) } else { this.resourceList.push(v.id) this.dataProcess(backResource) } }) this.cacheResourceList = resourceList } // backfill localParams let localParams = o.params.localParams || [] if (localParams.length) { this.localParams = localParams } } }, mounted () { }, components: { mLocalParams, mListBox, mList4Box, Treeselect } } </script>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,825
[Bug][WEB] the resource tree in the process definition of latest dev branch can't display correctly.
A clear and concise description of what the bug is. When we add multiple resources to a existed process definition and reopen the edit page, the resource list would't show as expected and throws an undefined exception. ![image](https://user-images.githubusercontent.com/52202080/125821430-b869c06d-dcac-4033-906a-9bf48377c766.png) **Which version of Dolphin Scheduler:** latest dev
https://github.com/apache/dolphinscheduler/issues/5825
https://github.com/apache/dolphinscheduler/pull/5826
5e343d3fd21ceb2bf141e7b70524b5b3eb5fb87b
6964c090c7a1cb3d1d69f5fe70ca3025df9b4be3
"2021-07-15T16:27:00Z"
java
"2021-07-20T12:48:58Z"
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/mr.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <div class="mr-model"> <m-list-box> <div slot="text">{{$t('Program Type')}}</div> <div slot="content"> <el-select v-model="programType" :disabled="isDetails" style="width: 110px;" size="small"> <el-option v-for="city in programTypeList" :key="city.code" :value="city.code" :label="city.code"> </el-option> </el-select> </div> </m-list-box> <m-list-box v-if="programType !== 'PYTHON'"> <div slot="text">{{$t('Main Class')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="mainClass" :placeholder="$t('Please enter main class')"> </el-input> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Main Jar Package')}}</div> <div slot="content"> <treeselect v-model="mainJar" maxHeight="200" :options="mainJarLists" :disable-branch-nodes="true" :normalizer="normalizer" :value-consists-of="valueConsistsOf" :disabled="isDetails" :placeholder="$t('Please enter main jar package')"> <div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div> </treeselect> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('App Name')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="appName" 
:placeholder="$t('Please enter app name(optional)')"> </el-input> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Main Arguments')}}</div> <div slot="content"> <el-input :autosize="{minRows:2}" :disabled="isDetails" type="textarea" size="small" v-model="mainArgs" :placeholder="$t('Please enter main arguments')"> </el-input> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Option Parameters')}}</div> <div slot="content"> <el-input :disabled="isDetails" :autosize="{minRows:2}" type="textarea" size="small" v-model="others" :placeholder="$t('Please enter option parameters')"> </el-input> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Resources')}}</div> <div slot="content"> <treeselect v-model="resourceList" :multiple="true" maxHeight="200" :options="mainJarList" :normalizer="normalizer" :disabled="isDetails" :value-consists-of="valueConsistsOf" :placeholder="$t('Please select resources')"> <div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}<span class="copy-path" @mousedown="_copyPath($event, node)" >&nbsp; <em class="el-icon-copy-document" data-container="body" data-toggle="tooltip" :title="$t('Copy path')" ></em> &nbsp; </span></div> </treeselect> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Custom Parameters')}}</div> <div slot="content"> <m-local-params ref="refLocalParams" @on-local-params="_onLocalParams" :udp-list="localParams" :hide="false"> </m-local-params> </div> </m-list-box> </div> </template> <script> import _ from 'lodash' import i18n from '@/module/i18n' import mListBox from './_source/listBox' import mLocalParams from './_source/localParams' import Treeselect from '@riophae/vue-treeselect' import '@riophae/vue-treeselect/dist/vue-treeselect.css' import disabledState from '@/module/mixin/disabledState' import Clipboard from 'clipboard' export default { name: 'mr', data () { return { valueConsistsOf: 'LEAF_PRIORITY', // Main function class mainClass: '', // Master jar package mainJar: null, // 
Main jar package (List) mainJarLists: [], mainJarList: [], // Resource(list) resourceList: [], // Cache ResourceList cacheResourceList: [], // Custom parameter localParams: [], // MR app name appName: '', // Main arguments mainArgs: '', // Option parameters others: '', // Program type programType: 'JAVA', // Program type(List) programTypeList: [{ code: 'JAVA' }, { code: 'PYTHON' }], normalizer (node) { return { label: node.name } }, allNoResources: [], noRes: [] } }, props: { backfillItem: Object }, mixins: [disabledState], methods: { _copyPath (e, node) { e.stopPropagation() let clipboard = new Clipboard('.copy-path', { text: function () { return node.raw.fullName } }) clipboard.on('success', handler => { this.$message.success(`${i18n.$t('Copy success')}`) // Free memory clipboard.destroy() }) clipboard.on('error', handler => { // Copy is not supported this.$message.warning(`${i18n.$t('The browser does not support automatic copying')}`) // Free memory clipboard.destroy() }) }, /** * getResourceId */ marjarId (name) { this.store.dispatch('dag/getResourceId', { type: 'FILE', fullName: '/' + name }).then(res => { this.mainJar = res.id }).catch(e => { this.$message.error(e.msg || '') }) }, /** * return localParams */ _onLocalParams (a) { this.localParams = a }, /** * return resourceList */ _onResourcesData (a) { this.resourceList = a }, /** * cache resourceList */ _onCacheResourcesData (a) { this.cacheResourceList = a }, diGuiTree (item) { // Recursive convenience tree structure item.forEach(item => { item.children === '' || item.children === undefined || item.children === null || item.children.length === 0 ? 
this.operationTree(item) : this.diGuiTree(item.children) }) }, operationTree (item) { if (item.dirctory) { item.isDisabled = true } delete item.children }, searchTree (element, id) { // 根据id查找节点 if (element.id === id) { return element } else if (element.children !== null) { let i let result = null for (i = 0; result === null && i < element.children.length; i++) { result = this.searchTree(element.children[i], id) } return result } return null }, dataProcess (backResource) { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { this.resourceList.forEach(v => { this.mainJarList.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return item.id }) Array.prototype.diff = function (a) { return this.filter(function (i) { return a.indexOf(i) < 0 }) } let diffSet = this.resourceList.diff(resourceIdArr) let optionsCmp = [] if (diffSet.length > 0) { diffSet.forEach(item => { backResource.forEach(item1 => { if (item === item1.id || item === item1.res) { optionsCmp.push(item1) } }) }) } let noResources = [{ id: -1, name: $t('Unauthorized or deleted resources'), fullName: '/' + $t('Unauthorized or deleted resources'), children: [] }] if (optionsCmp.length > 0) { this.allNoResources = optionsCmp optionsCmp = optionsCmp.map(item => { return { id: item.id, name: item.name, fullName: item.res } }) optionsCmp.forEach(item => { item.isNew = true }) noResources[0].children = optionsCmp this.mainJarList = this.mainJarList.concat(noResources) } } }, /** * verification */ _verification () { if (this.programType !== 'PYTHON' && !this.mainClass) { this.$message.warning(`${i18n.$t('Please enter main class')}`) return false } if (!this.mainJar) { this.$message.warning(`${i18n.$t('Please enter main jar package')}`) return false } // noRes if (this.noRes.length > 0) { this.$message.warning(`${i18n.$t('Please delete all non-existent resources')}`) return false } // localParams 
Subcomponent verification if (!this.$refs.refLocalParams._verifProp()) { return false } // storage this.$emit('on-params', { mainClass: this.mainClass, mainJar: { id: this.mainJar }, resourceList: _.map(this.resourceList, v => { return { id: v } }), localParams: this.localParams, appName: this.appName, mainArgs: this.mainArgs, others: this.others, programType: this.programType }) return true } }, watch: { /** * monitor */ programType (type) { if (type === 'PYTHON') { this.mainClass = '' } }, // Watch the cacheParams cacheParams (val) { this.$emit('on-cache-params', val) }, resourceIdArr (arr) { let result = [] arr.forEach(item => { this.allNoResources.forEach(item1 => { if (item.id === item1.id) { // resultBool = true result.push(item1) } }) }) this.noRes = result } }, computed: { resourceIdArr () { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { this.resourceList.forEach(v => { this.mainJarList.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return { id: item.id, name: item.name, res: item.fullName } }) } return resourceIdArr }, cacheParams () { return { mainClass: this.mainClass, mainJar: { id: this.mainJar }, resourceList: this.resourceIdArr, localParams: this.localParams, appName: this.appName, mainArgs: this.mainArgs, others: this.others, programType: this.programType } } }, created () { let item = this.store.state.dag.resourcesListS let items = this.store.state.dag.resourcesListJar this.diGuiTree(item) this.diGuiTree(items) this.mainJarList = item this.mainJarLists = items let o = this.backfillItem // Non-null objects represent backfill if (!_.isEmpty(o)) { this.mainClass = o.params.mainClass || '' if (o.params.mainJar.res) { this.marjarId(o.params.mainJar.res) } else if (o.params.mainJar.res === '') { this.mainJar = '' } else { this.mainJar = o.params.mainJar.id || '' } this.appName = o.params.appName || '' this.mainArgs = 
o.params.mainArgs || '' this.others = o.params.others this.programType = o.params.programType || 'JAVA' // backfill resourceList let resourceList = o.params.resourceList || [] if (resourceList.length) { _.map(resourceList, v => { if (!v.id) { this.store.dispatch('dag/getResourceId', { type: 'FILE', fullName: '/' + v.res }).then(res => { this.resourceList.push(res.id) this.dataProcess(backResource) }).catch(e => { this.resourceList.push(v.res) this.dataProcess(backResource) }) } else { this.resourceList.push(v.id) this.dataProcess(backResource) } }) this.cacheResourceList = resourceList } // backfill localParams let backResource = o.params.resourceList || [] let localParams = o.params.localParams || [] if (localParams.length) { this.localParams = localParams } } }, mounted () { }, components: { mLocalParams, mListBox, Treeselect } } </script>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,825
[Bug][WEB] the resource tree in the process definition of latest dev branch can't display correctly.
A clear and concise description of what the bug is. When we add multiple resources to a existed process definition and reopen the edit page, the resource list would't show as expected and throws an undefined exception. ![image](https://user-images.githubusercontent.com/52202080/125821430-b869c06d-dcac-4033-906a-9bf48377c766.png) **Which version of Dolphin Scheduler:** latest dev
https://github.com/apache/dolphinscheduler/issues/5825
https://github.com/apache/dolphinscheduler/pull/5826
5e343d3fd21ceb2bf141e7b70524b5b3eb5fb87b
6964c090c7a1cb3d1d69f5fe70ca3025df9b4be3
"2021-07-15T16:27:00Z"
java
"2021-07-20T12:48:58Z"
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/python.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <div class="python-model"> <m-list-box> <div slot="text">{{$t('Script')}}</div> <div slot="content"> <div class="form-mirror"> <textarea id="code-python-mirror" name="code-python-mirror" style="opacity: 0;"> </textarea> <a class="ans-modal-box-max"> <em class="el-icon-full-screen" @click="setEditorVal"></em> </a> </div> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Resources')}}</div> <div slot="content"> <treeselect v-model="resourceList" :multiple="true" maxHeight="200" :options="resourceOptions" :normalizer="normalizer" :value-consists-of="valueConsistsOf" :disabled="isDetails" :placeholder="$t('Please select resources')"> <div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}<span class="copy-path" @mousedown="_copyPath($event, node)" >&nbsp; <em class="el-icon-copy-document" data-container="body" data-toggle="tooltip" :title="$t('Copy path')" ></em> &nbsp; </span></div> </treeselect> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Custom Parameters')}}</div> <div slot="content"> <m-local-params ref="refLocalParams" @on-local-params="_onLocalParams" :udp-list="localParams" :hide="false"> </m-local-params> </div> </m-list-box> <el-dialog 
:visible.sync="scriptBoxDialog" append-to-body="true" width="80%"> <m-script-box :item="item" @getSriptBoxValue="getSriptBoxValue" @closeAble="closeAble"></m-script-box> </el-dialog> </div> </template> <script> import _ from 'lodash' import i18n from '@/module/i18n' import mListBox from './_source/listBox' import mScriptBox from './_source/scriptBox' import mLocalParams from './_source/localParams' import Treeselect from '@riophae/vue-treeselect' import '@riophae/vue-treeselect/dist/vue-treeselect.css' import disabledState from '@/module/mixin/disabledState' import codemirror from '@/conf/home/pages/resource/pages/file/pages/_source/codemirror' import Clipboard from 'clipboard' let editor export default { name: 'python', data () { return { valueConsistsOf: 'LEAF_PRIORITY', // script rawScript: '', // Custom parameter localParams: [], // resource(list) resourceList: [], // Cache ResourceList cacheResourceList: [], resourceOptions: [], normalizer (node) { return { label: node.name } }, allNoResources: [], noRes: [], item: '', scriptBoxDialog: false } }, mixins: [disabledState], props: { backfillItem: Object }, methods: { _copyPath (e, node) { e.stopPropagation() let clipboard = new Clipboard('.copy-path', { text: function () { return node.raw.fullName } }) clipboard.on('success', handler => { this.$message.success(`${i18n.$t('Copy success')}`) // Free memory clipboard.destroy() }) clipboard.on('error', handler => { // Copy is not supported this.$message.warning(`${i18n.$t('The browser does not support automatic copying')}`) // Free memory clipboard.destroy() }) }, /** * return localParams */ _onLocalParams (a) { this.localParams = a }, setEditorVal () { this.item = editor.getValue() this.scriptBoxDialog = true }, getSriptBoxValue (val) { editor.setValue(val) }, /** * return resourceList */ // _onResourcesData (a) { // this.resourceList = a // }, /** * cache resourceList */ _onCacheResourcesData (a) { this.cacheResourceList = a }, /** * verification */ _verification 
() { // rawScript 验证 if (!editor.getValue()) { this.$message.warning(`${i18n.$t('Please enter script(required)')}`) return false } // localParams Subcomponent verification if (!this.$refs.refLocalParams._verifProp()) { return false } // noRes if (this.noRes.length > 0) { this.$message.warning(`${i18n.$t('Please delete all non-existent resources')}`) return false } // storage this.$emit('on-params', { resourceList: _.map(this.resourceList, v => { return { id: v } }), localParams: this.localParams, rawScript: editor.getValue() }) return true }, /** * Processing code highlighting */ _handlerEditor () { // editor editor = codemirror('code-python-mirror', { mode: 'python', readOnly: this.isDetails }) this.keypress = () => { if (!editor.getOption('readOnly')) { editor.showHint({ completeSingle: false }) } } // Monitor keyboard editor.on('keypress', this.keypress) editor.setValue(this.rawScript) return editor }, diGuiTree (item) { // Recursive convenience tree structure item.forEach(item => { item.children === '' || item.children === undefined || item.children === null || item.children.length === 0 ? 
this.operationTree(item) : this.diGuiTree(item.children) }) }, operationTree (item) { if (item.dirctory) { item.isDisabled = true } delete item.children }, searchTree (element, id) { // 根据id查找节点 if (element.id === id) { return element } else if (element.children !== null) { let i let result = null for (i = 0; result === null && i < element.children.length; i++) { result = this.searchTree(element.children[i], id) } return result } return null }, dataProcess (backResource) { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { this.resourceList.forEach(v => { this.resourceOptions.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return item.id }) Array.prototype.diff = function (a) { return this.filter(function (i) { return a.indexOf(i) < 0 }) } let diffSet = this.resourceList.diff(resourceIdArr) let optionsCmp = [] if (diffSet.length > 0) { diffSet.forEach(item => { backResource.forEach(item1 => { if (item === item1.id || item === item1.res) { optionsCmp.push(item1) } }) }) } let noResources = [{ id: -1, name: $t('Unauthorized or deleted resources'), fullName: '/' + $t('Unauthorized or deleted resources'), children: [] }] if (optionsCmp.length > 0) { this.allNoResources = optionsCmp optionsCmp = optionsCmp.map(item => { return { id: item.id, name: item.name, fullName: item.res } }) optionsCmp.forEach(item => { item.isNew = true }) noResources[0].children = optionsCmp this.resourceOptions = this.resourceOptions.concat(noResources) } } } }, watch: { // Watch the cacheParams cacheParams (val) { this.$emit('on-cache-params', val) }, resourceIdArr (arr) { let result = [] arr.forEach(item => { this.allNoResources.forEach(item1 => { if (item.id === item1.id) { // resultBool = true result.push(item1) } }) }) this.noRes = result } }, computed: { resourceIdArr () { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { 
this.resourceList.forEach(v => { this.resourceOptions.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return { id: item.id, name: item.name, res: item.fullName } }) } return resourceIdArr }, cacheParams () { return { resourceList: this.resourceIdArr, localParams: this.localParams } } }, created () { let item = this.store.state.dag.resourcesListS this.diGuiTree(item) this.resourceOptions = item let o = this.backfillItem // Non-null objects represent backfill if (!_.isEmpty(o)) { this.rawScript = o.params.rawScript || '' // backfill resourceList let backResource = o.params.resourceList || [] let resourceList = o.params.resourceList || [] if (resourceList.length) { _.map(resourceList, v => { if (!v.id) { this.store.dispatch('dag/getResourceId', { type: 'FILE', fullName: '/' + v.res }).then(res => { this.resourceList.push(res.id) this.dataProcess(backResource) }).catch(e => { this.resourceList.push(v.res) this.dataProcess(backResource) }) } else { this.resourceList.push(v.id) this.dataProcess(backResource) } }) this.cacheResourceList = resourceList } // backfill localParams let localParams = o.params.localParams || [] if (localParams.length) { this.localParams = localParams } } }, mounted () { setTimeout(() => { this._handlerEditor() }, 200) }, destroyed () { editor.toTextArea() // Uninstall editor.off($('.code-python-mirror'), 'keypress', this.keypress) }, components: { mLocalParams, mListBox, Treeselect, mScriptBox } } </script>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,825
[Bug][WEB] the resource tree in the process definition of latest dev branch can't display correctly.
A clear and concise description of what the bug is. When we add multiple resources to a existed process definition and reopen the edit page, the resource list would't show as expected and throws an undefined exception. ![image](https://user-images.githubusercontent.com/52202080/125821430-b869c06d-dcac-4033-906a-9bf48377c766.png) **Which version of Dolphin Scheduler:** latest dev
https://github.com/apache/dolphinscheduler/issues/5825
https://github.com/apache/dolphinscheduler/pull/5826
5e343d3fd21ceb2bf141e7b70524b5b3eb5fb87b
6964c090c7a1cb3d1d69f5fe70ca3025df9b4be3
"2021-07-15T16:27:00Z"
java
"2021-07-20T12:48:58Z"
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/shell.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <div class="shell-model"> <m-list-box> <div slot="text">{{$t('Script')}}</div> <div slot="content"> <div class="form-mirror"> <textarea id="code-shell-mirror" name="code-shell-mirror" style="opacity: 0"> </textarea> <a class="ans-modal-box-max"> <em class="el-icon-full-screen" @click="setEditorVal"></em> </a> </div> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Resources')}}</div> <div slot="content"> <treeselect v-model="resourceList" :multiple="true" maxHeight="200" :options="options" :normalizer="normalizer" :disabled="isDetails" :value-consists-of="valueConsistsOf" :placeholder="$t('Please select resources')"> <div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }} <span class="copy-path" @mousedown="_copyPath($event, node)" >&nbsp; <em class="el-icon-copy-document" data-container="body" data-toggle="tooltip" :title="$t('Copy path')" ></em> &nbsp; </span></div> </treeselect> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Custom Parameters')}}</div> <div slot="content"> <m-local-params ref="refLocalParams" @on-local-params="_onLocalParams" :udp-list="localParams" :hide="true"> </m-local-params> </div> </m-list-box> <el-dialog 
:visible.sync="scriptBoxDialog" append-to-body="true" width="80%"> <m-script-box :item="item" @getSriptBoxValue="getSriptBoxValue" @closeAble="closeAble"></m-script-box> </el-dialog> </div> </template> <script> import _ from 'lodash' import i18n from '@/module/i18n' import mListBox from './_source/listBox' import mScriptBox from './_source/scriptBox' import mLocalParams from './_source/localParams' import disabledState from '@/module/mixin/disabledState' import Treeselect from '@riophae/vue-treeselect' import '@riophae/vue-treeselect/dist/vue-treeselect.css' import codemirror from '@/conf/home/pages/resource/pages/file/pages/_source/codemirror' import Clipboard from 'clipboard' let editor export default { name: 'shell', data () { return { valueConsistsOf: 'LEAF_PRIORITY', // script rawScript: '', // Custom parameter localParams: [], // resource(list) resourceList: [], // Cache ResourceList cacheResourceList: [], // define options options: [], normalizer (node) { return { label: node.name } }, allNoResources: [], noRes: [], item: '', scriptBoxDialog: false } }, mixins: [disabledState], props: { backfillItem: Object }, methods: { _copyPath (e, node) { e.stopPropagation() let clipboard = new Clipboard('.copy-path', { text: function () { return node.raw.fullName } }) clipboard.on('success', handler => { this.$message.success(`${i18n.$t('Copy success')}`) // Free memory clipboard.destroy() }) clipboard.on('error', handler => { // Copy is not supported this.$message.warning(`${i18n.$t('The browser does not support automatic copying')}`) // Free memory clipboard.destroy() }) }, /** * return localParams */ _onLocalParams (a) { this.localParams = a }, setEditorVal () { this.item = editor.getValue() this.scriptBoxDialog = true }, getSriptBoxValue (val) { editor.setValue(val) // this.scriptBoxDialog = false }, closeAble () { // this.scriptBoxDialog = false }, /** * return resourceList * */ _onResourcesData (a) { this.resourceList = a }, /** * cache resourceList */ 
_onCacheResourcesData (a) { this.cacheResourceList = a }, /** * verification */ _verification () { // rawScript verification if (!editor.getValue()) { this.$message.warning(`${i18n.$t('Please enter script(required)')}`) return false } // localParams Subcomponent verification if (!this.$refs.refLocalParams._verifProp()) { return false } // noRes if (this.noRes.length > 0) { this.$message.warning(`${i18n.$t('Please delete all non-existent resources')}`) return false } // Process resourcelist let dataProcessing = _.map(this.resourceList, v => { return { id: v } }) // storage this.$emit('on-params', { resourceList: dataProcessing, localParams: this.localParams, rawScript: editor.getValue() }) return true }, /** * Processing code highlighting */ _handlerEditor () { // editor editor = codemirror('code-shell-mirror', { mode: 'shell', readOnly: this.isDetails }) this.keypress = () => { if (!editor.getOption('readOnly')) { editor.showHint({ completeSingle: false }) } } // Monitor keyboard editor.on('keypress', this.keypress) editor.setValue(this.rawScript) return editor }, diGuiTree (item) { // Recursive convenience tree structure item.forEach(item => { item.children === '' || item.children === undefined || item.children === null || item.children.length === 0 ? 
this.operationTree(item) : this.diGuiTree(item.children) }) }, operationTree (item) { if (item.dirctory) { item.isDisabled = true } delete item.children }, searchTree (element, id) { // 根据id查找节点 if (element.id === id) { return element } else if (element.children !== null) { let i let result = null for (i = 0; result === null && i < element.children.length; i++) { result = this.searchTree(element.children[i], id) } return result } return null }, dataProcess (backResource) { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { this.resourceList.forEach(v => { this.options.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return item.id }) Array.prototype.diff = function (a) { return this.filter(function (i) { return a.indexOf(i) < 0 }) } let diffSet = this.resourceList.diff(resourceIdArr) let optionsCmp = [] if (diffSet.length > 0) { diffSet.forEach(item => { backResource.forEach(item1 => { if (item === item1.id || item === item1.res) { optionsCmp.push(item1) } }) }) } let noResources = [{ id: -1, name: $t('Unauthorized or deleted resources'), fullName: '/' + $t('Unauthorized or deleted resources'), children: [] }] if (optionsCmp.length > 0) { this.allNoResources = optionsCmp optionsCmp = optionsCmp.map(item => { return { id: item.id, name: item.name, fullName: item.res } }) optionsCmp.forEach(item => { item.isNew = true }) noResources[0].children = optionsCmp this.options = this.options.concat(noResources) } } } }, watch: { // Watch the cacheParams cacheParams (val) { this.$emit('on-cache-params', val) }, resourceIdArr (arr) { let result = [] arr.forEach(item => { this.allNoResources.forEach(item1 => { if (item.id === item1.id) { // resultBool = true result.push(item1) } }) }) this.noRes = result } }, computed: { resourceIdArr () { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { this.resourceList.forEach(v => { 
this.options.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return { id: item.id, name: item.name, res: item.fullName } }) } return resourceIdArr }, cacheParams () { return { resourceList: this.resourceIdArr, localParams: this.localParams } } }, created () { let item = this.store.state.dag.resourcesListS this.diGuiTree(item) this.options = item let o = this.backfillItem // Non-null objects represent backfill if (!_.isEmpty(o)) { this.rawScript = o.params.rawScript || '' // backfill resourceList let backResource = o.params.resourceList || [] let resourceList = o.params.resourceList || [] if (resourceList.length) { _.map(resourceList, v => { if (!v.id) { this.store.dispatch('dag/getResourceId', { type: 'FILE', fullName: '/' + v.res }).then(res => { this.resourceList.push(res.id) this.dataProcess(backResource) }).catch(e => { this.resourceList.push(v.res) this.dataProcess(backResource) }) } else { this.resourceList.push(v.id) this.dataProcess(backResource) } }) this.cacheResourceList = resourceList } // backfill localParams let localParams = o.params.localParams || [] if (localParams.length) { this.localParams = localParams } } }, mounted () { setTimeout(() => { this._handlerEditor() }, 200) }, destroyed () { if (editor) { editor.toTextArea() // Uninstall editor.off($('.code-shell-mirror'), 'keypress', this.keypress) } }, components: { mLocalParams, mListBox, mScriptBox, Treeselect } } </script>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,825
[Bug][WEB] the resource tree in the process definition of latest dev branch can't display correctly.
A clear and concise description of what the bug is. When we add multiple resources to a existed process definition and reopen the edit page, the resource list would't show as expected and throws an undefined exception. ![image](https://user-images.githubusercontent.com/52202080/125821430-b869c06d-dcac-4033-906a-9bf48377c766.png) **Which version of Dolphin Scheduler:** latest dev
https://github.com/apache/dolphinscheduler/issues/5825
https://github.com/apache/dolphinscheduler/pull/5826
5e343d3fd21ceb2bf141e7b70524b5b3eb5fb87b
6964c090c7a1cb3d1d69f5fe70ca3025df9b4be3
"2021-07-15T16:27:00Z"
java
"2021-07-20T12:48:58Z"
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/spark.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <div class="spark-model"> <m-list-box> <div slot="text">{{$t('Program Type')}}</div> <div slot="content"> <el-select style="width: 130px;" size="small" v-model="programType" :disabled="isDetails"> <el-option v-for="city in programTypeList" :key="city.code" :value="city.code" :label="city.code"> </el-option> </el-select> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Spark Version')}}</div> <div slot="content"> <el-select style="width: 130px;" size="small" v-model="sparkVersion" :disabled="isDetails"> <el-option v-for="city in sparkVersionList" :key="city.code" :value="city.code" :label="city.code"> </el-option> </el-select> </div> </m-list-box> <m-list-box v-if="programType !== 'PYTHON'"> <div slot="text">{{$t('Main Class')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="mainClass" :placeholder="$t('Please enter main class')"> </el-input> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Main Jar Package')}}</div> <div slot="content"> <treeselect v-model="mainJar" maxHeight="200" :options="mainJarLists" :disable-branch-nodes="true" :normalizer="normalizer" :disabled="isDetails" :placeholder="$t('Please enter main jar 
package')"> <div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div> </treeselect> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Deploy Mode')}}</div> <div slot="content"> <el-radio-group v-model="deployMode" size="small"> <el-radio :label="'cluster'" :disabled="isDetails"></el-radio> <el-radio :label="'client'" :disabled="isDetails"></el-radio> <el-radio :label="'local'" :disabled="isDetails"></el-radio> </el-radio-group> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('App Name')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="appName" :placeholder="$t('Please enter app name(optional)')"> </el-input> </div> </m-list-box> <m-list-4-box> <div slot="text">{{$t('Driver Cores')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="driverCores" :placeholder="$t('Please enter Driver cores')"> </el-input> </div> <div slot="text-2">{{$t('Driver Memory')}}</div> <div slot="content-2"> <el-input :disabled="isDetails" type="input" size="small" v-model="driverMemory" :placeholder="$t('Please enter Driver memory')"> </el-input> </div> </m-list-4-box> <m-list-4-box> <div slot="text">{{$t('Executor Number')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="numExecutors" :placeholder="$t('Please enter Executor number')"> </el-input> </div> <div slot="text-2">{{$t('Executor Memory')}}</div> <div slot="content-2"> <el-input :disabled="isDetails" type="input" size="small" v-model="executorMemory" :placeholder="$t('Please enter Executor memory')"> </el-input> </div> </m-list-4-box> <m-list-4-box> <div slot="text">{{$t('Executor Cores')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="executorCores" :placeholder="$t('Please enter Executor cores')"> </el-input> </div> </m-list-4-box> <m-list-box> <div slot="text">{{$t('Main Arguments')}}</div> <div slot="content"> 
<el-input :autosize="{minRows:2}" :disabled="isDetails" type="textarea" size="small" v-model="mainArgs" :placeholder="$t('Please enter main arguments')"> </el-input> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Option Parameters')}}</div> <div slot="content"> <el-input :disabled="isDetails" :autosize="{minRows:2}" type="textarea" size="small" v-model="others" :placeholder="$t('Please enter option parameters')"> </el-input> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Resources')}}</div> <div slot="content"> <treeselect v-model="resourceList" :multiple="true" maxHeight="200" :options="mainJarList" :normalizer="normalizer" :value-consists-of="valueConsistsOf" :disabled="isDetails" :placeholder="$t('Please select resources')"> <div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}<span class="copy-path" @mousedown="_copyPath($event, node)" >&nbsp; <em class="el-icon-copy-document" data-container="body" data-toggle="tooltip" :title="$t('Copy path')" ></em> &nbsp; </span></div> </treeselect> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Custom Parameters')}}</div> <div slot="content"> <m-local-params ref="refLocalParams" @on-local-params="_onLocalParams" :udp-list="localParams" :hide="false"> </m-local-params> </div> </m-list-box> </div> </template> <script> import _ from 'lodash' import i18n from '@/module/i18n' import mLocalParams from './_source/localParams' import mListBox from './_source/listBox' import mList4Box from './_source/list4Box' import Treeselect from '@riophae/vue-treeselect' import '@riophae/vue-treeselect/dist/vue-treeselect.css' import disabledState from '@/module/mixin/disabledState' import Clipboard from 'clipboard' export default { name: 'spark', data () { return { valueConsistsOf: 'LEAF_PRIORITY', // Main function class mainClass: '', // Master jar package mainJar: null, // Master jar package(List) mainJarLists: [], mainJarList: [], // Deployment method deployMode: 'cluster', // Resource(list) 
resourceList: [], // Cache ResourceList cacheResourceList: [], // Custom function localParams: [], // Driver cores driverCores: 1, // Driver memory driverMemory: '512M', // Executor number numExecutors: 2, // Executor memory executorMemory: '2G', // Executor cores executorCores: 2, // Spark app name appName: '', // Main arguments mainArgs: '', // Option parameters others: '', // Program type programType: 'SCALA', // Program type(List) programTypeList: [{ code: 'JAVA' }, { code: 'SCALA' }, { code: 'PYTHON' }], // Spark version sparkVersion: 'SPARK2', // Spark version(LIst) sparkVersionList: [{ code: 'SPARK2' }, { code: 'SPARK1' }], normalizer (node) { return { label: node.name } }, allNoResources: [], noRes: [] } }, props: { backfillItem: Object }, mixins: [disabledState], methods: { _copyPath (e, node) { e.stopPropagation() let clipboard = new Clipboard('.copy-path', { text: function () { return node.raw.fullName } }) clipboard.on('success', handler => { this.$message.success(`${i18n.$t('Copy success')}`) // Free memory clipboard.destroy() }) clipboard.on('error', handler => { // Copy is not supported this.$message.warning(`${i18n.$t('The browser does not support automatic copying')}`) // Free memory clipboard.destroy() }) }, /** * getResourceId */ marjarId (name) { this.store.dispatch('dag/getResourceId', { type: 'FILE', fullName: '/' + name }).then(res => { this.mainJar = res.id }).catch(e => { this.$message.error(e.msg || '') }) }, /** * return localParams */ _onLocalParams (a) { this.localParams = a }, /** * return resourceList */ _onResourcesData (a) { this.resourceList = a }, /** * cache resourceList */ _onCacheResourcesData (a) { this.cacheResourceList = a }, diGuiTree (item) { // Recursive convenience tree structure item.forEach(item => { item.children === '' || item.children === undefined || item.children === null || item.children.length === 0 ? 
this.operationTree(item) : this.diGuiTree(item.children) }) }, operationTree (item) { if (item.dirctory) { item.isDisabled = true } delete item.children }, searchTree (element, id) { // 根据id查找节点 if (element.id === id) { return element } else if (element.children !== null) { let i let result = null for (i = 0; result === null && i < element.children.length; i++) { result = this.searchTree(element.children[i], id) } return result } return null }, dataProcess (backResource) { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { this.resourceList.forEach(v => { this.mainJarList.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return item.id }) Array.prototype.diff = function (a) { return this.filter(function (i) { return a.indexOf(i) < 0 }) } let diffSet = this.resourceList.diff(resourceIdArr) let optionsCmp = [] if (diffSet.length > 0) { diffSet.forEach(item => { backResource.forEach(item1 => { if (item === item1.id || item === item1.res) { optionsCmp.push(item1) } }) }) } let noResources = [{ id: -1, name: $t('Unauthorized or deleted resources'), fullName: '/' + $t('Unauthorized or deleted resources'), children: [] }] if (optionsCmp.length > 0) { this.allNoResources = optionsCmp optionsCmp = optionsCmp.map(item => { return { id: item.id, name: item.name, fullName: item.res } }) optionsCmp.forEach(item => { item.isNew = true }) noResources[0].children = optionsCmp this.mainJarList = this.mainJarList.concat(noResources) } } }, /** * verification */ _verification () { if (this.programType !== 'PYTHON' && !this.mainClass) { this.$message.warning(`${i18n.$t('Please enter main class')}`) return false } if (!this.mainJar) { this.$message.warning(`${i18n.$t('Please enter main jar package')}`) return false } if (!this.driverCores) { this.$message.warning(`${i18n.$t('Please enter Driver cores')}`) return false } if 
(!Number.isInteger(parseInt(this.driverCores))) { this.$message.warning(`${i18n.$t('Core number should be positive integer')}`) return false } if (!this.driverMemory) { this.$message.warning(`${i18n.$t('Please enter Driver memory')}`) return false } if (!Number.isInteger(parseInt(this.driverMemory))) { this.$message.warning(`${i18n.$t('Memory should be a positive integer')}`) return false } if (!this.executorCores) { this.$message.warning(`${i18n.$t('Please enter Executor cores')}`) return false } if (!Number.isInteger(parseInt(this.executorCores))) { this.$message.warning(`${i18n.$t('Core number should be positive integer')}`) return false } if (!this.executorMemory) { this.$message.warning(`${i18n.$t('Please enter Executor memory')}`) return false } if (!Number.isInteger(parseInt(this.executorMemory))) { this.$message.warning(`${i18n.$t('Memory should be a positive integer')}`) return false } if (!this.numExecutors) { this.$message.warning(`${i18n.$t('Please enter Executor number')}`) return false } if (!Number.isInteger(parseInt(this.numExecutors))) { this.$message.warning(`${i18n.$t('The Executor number should be a positive integer')}`) return false } // noRes if (this.noRes.length > 0) { this.$message.warning(`${i18n.$t('Please delete all non-existent resources')}`) return false } // localParams Subcomponent verification if (!this.$refs.refLocalParams._verifProp()) { return false } // Process resourcelist let dataProcessing = _.map(this.resourceList, v => { return { id: v } }) // storage this.$emit('on-params', { mainClass: this.mainClass, mainJar: { id: this.mainJar }, deployMode: this.deployMode, resourceList: dataProcessing, localParams: this.localParams, driverCores: this.driverCores, driverMemory: this.driverMemory, numExecutors: this.numExecutors, executorMemory: this.executorMemory, executorCores: this.executorCores, appName: this.appName, mainArgs: this.mainArgs, others: this.others, programType: this.programType, sparkVersion: this.sparkVersion }) 
return true } }, watch: { // Listening type programType (type) { if (type === 'PYTHON') { this.mainClass = '' } }, // Watch the cacheParams cacheParams (val) { this.$emit('on-cache-params', val) }, resourceIdArr (arr) { let result = [] arr.forEach(item => { this.allNoResources.forEach(item1 => { if (item.id === item1.id) { // resultBool = true result.push(item1) } }) }) this.noRes = result } }, computed: { resourceIdArr () { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { this.resourceList.forEach(v => { this.mainJarList.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return { id: item.id, name: item.name, res: item.fullName } }) } return resourceIdArr }, cacheParams () { return { mainClass: this.mainClass, mainJar: { id: this.mainJar }, deployMode: this.deployMode, resourceList: this.resourceIdArr, localParams: this.localParams, driverCores: this.driverCores, driverMemory: this.driverMemory, numExecutors: this.numExecutors, executorMemory: this.executorMemory, executorCores: this.executorCores, appName: this.appName, mainArgs: this.mainArgs, others: this.others, programType: this.programType, sparkVersion: this.sparkVersion } } }, created () { let item = this.store.state.dag.resourcesListS let items = this.store.state.dag.resourcesListJar this.diGuiTree(item) this.diGuiTree(items) this.mainJarList = item this.mainJarLists = items let o = this.backfillItem // Non-null objects represent backfill if (!_.isEmpty(o)) { this.mainClass = o.params.mainClass || '' if (o.params.mainJar.res) { this.marjarId(o.params.mainJar.res) } else if (o.params.mainJar.res === '') { this.mainJar = '' } else { this.mainJar = o.params.mainJar.id || '' } this.deployMode = o.params.deployMode || '' this.driverCores = o.params.driverCores || 1 this.driverMemory = o.params.driverMemory || '512M' this.numExecutors = o.params.numExecutors || 2 this.executorMemory = 
o.params.executorMemory || '2G' this.executorCores = o.params.executorCores || 2 this.appName = o.params.appName || '' this.mainArgs = o.params.mainArgs || '' this.others = o.params.others this.programType = o.params.programType || 'SCALA' this.sparkVersion = o.params.sparkVersion || 'SPARK2' // backfill resourceList let backResource = o.params.resourceList || [] let resourceList = o.params.resourceList || [] if (resourceList.length) { _.map(resourceList, v => { if (!v.id) { this.store.dispatch('dag/getResourceId', { type: 'FILE', fullName: '/' + v.res }).then(res => { this.resourceList.push(res.id) this.dataProcess(backResource) }).catch(e => { this.resourceList.push(v.res) this.dataProcess(backResource) }) } else { this.resourceList.push(v.id) this.dataProcess(backResource) } }) this.cacheResourceList = resourceList } // backfill localParams let localParams = o.params.localParams || [] if (localParams.length) { this.localParams = localParams } } }, mounted () { }, components: { mLocalParams, mListBox, mList4Box, Treeselect } } </script>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,825
[Bug][WEB] the resource tree in the process definition of latest dev branch can't display correctly.
A clear and concise description of what the bug is. When we add multiple resources to a existed process definition and reopen the edit page, the resource list would't show as expected and throws an undefined exception. ![image](https://user-images.githubusercontent.com/52202080/125821430-b869c06d-dcac-4033-906a-9bf48377c766.png) **Which version of Dolphin Scheduler:** latest dev
https://github.com/apache/dolphinscheduler/issues/5825
https://github.com/apache/dolphinscheduler/pull/5826
5e343d3fd21ceb2bf141e7b70524b5b3eb5fb87b
6964c090c7a1cb3d1d69f5fe70ca3025df9b4be3
"2021-07-15T16:27:00Z"
java
"2021-07-20T12:48:58Z"
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/waterdrop.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <div class="waterdrop-model"> <!--deploy mode--> <m-list-box> <div slot="text">{{$t('Deploy Mode')}}</div> <div slot="content"> <el-radio-group size="small" v-model="deployMode"> <el-radio :label="'client'" :disabled="isDetails"></el-radio> <el-radio :label="'cluster'" :disabled="isDetails"></el-radio> <el-radio :label="'local'" :disabled="isDetails"></el-radio> </el-radio-group> </div> </m-list-box> <!--master--> <m-list-box v-if="deployMode !== 'local'"> <div slot="text">{{$t('Master')}}</div> <div slot="content" class="display-flex"> <el-select size="small" v-model="master" :disabled="isDetails"> <el-option v-for="city in masterType" :key="city.code" :value="city.code" :label="city.code"> </el-option> </el-select> <el-input :disabled="isDetails" type="input" size="small" v-model="masterUrl" :placeholder="$t('Please Enter Url')" class="cont-extra" v-if="masterUrlState"> </el-input> </div> </m-list-box> <!--queue--> <m-list-box v-if="deployMode !== 'local' && master === 'yarn'"> <div slot="text">{{$t('Queue')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="queue" :placeholder="$t('Please enter queue value')" style="width: 192px;"> 
</el-input> </div> </m-list-box> <!--config file--> <m-list-box> <div slot="text">{{$t('Resources')}}</div> <div slot="content"> <treeselect v-model="resourceList" maxHeight="200" :disable-branch-nodes="true" :multiple="true" :options="options" :normalizer="normalizer" :disabled="isDetails" :value-consists-of="valueConsistsOf" :placeholder="$t('Please select resources')"> <div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div> </treeselect> </div> </m-list-box> <!--custom parameters--> <m-list-box> <div slot="text">{{$t('Custom Parameters')}}</div> <div slot="content"> <m-local-params ref="refLocalParams" @on-local-params="_onLocalParams" :udp-list="localParams" :hide="false"> </m-local-params> </div> </m-list-box> </div> </template> <script> import _ from 'lodash' import i18n from '@/module/i18n' import mListBox from './_source/listBox' import mLocalParams from './_source/localParams' import disabledState from '@/module/mixin/disabledState' import Treeselect from '@riophae/vue-treeselect' import '@riophae/vue-treeselect/dist/vue-treeselect.css' export default { name: 'waterdrop', data () { return { valueConsistsOf: 'LEAF_PRIORITY', // script rawScript: '', // waterdrop script baseScript: 'sh ${WATERDROP_HOME}/bin/start-waterdrop.sh', // eslint-disable-line // resourceNameVal resourceNameVal: [], // Custom parameter localParams: [], // resource(list) resourceList: [], // Deployment method deployMode: 'client', // Deployment master queue: 'default', // Deployment master master: 'yarn', // Spark version(LIst) masterType: [{ code: 'yarn' }, { code: 'local' }, { code: 'spark://' }, { code: 'mesos://' }], // Deployment masterUrl state masterUrlState: false, // Deployment masterUrl masterUrl: '', // Cache ResourceList cacheResourceList: [], // define options options: [], normalizer (node) { return { label: node.name } }, allNoResources: [], noRes: [] } }, mixins: [disabledState], props: { backfillItem: Object }, methods: { /** * return localParams */ 
_onLocalParams (a) { this.localParams = a }, /** * return resourceList * */ _onResourcesData (a) { this.resourceList = a }, /** * cache resourceList */ _onCacheResourcesData (a) { this.cacheResourceList = a }, /** * verification */ _verification () { // localParams Subcomponent verification if (!this.$refs.refLocalParams._verifProp()) { return false } // noRes if (this.noRes.length > 0) { this.$message.warning(`${i18n.$t('Please delete all non-existent resources')}`) return false } // noRes if (!this.resourceNameVal.resourceList) { this.$message.warning(`${i18n.$t('Please select the waterdrop resources')}`) return false } if (this.resourceNameVal.resourceList && this.resourceNameVal.resourceList.length === 0) { this.$message.warning(`${i18n.$t('Please select the waterdrop resources')}`) return false } // Process resourcelist let dataProcessing = _.map(this.resourceList, v => { return { id: v } }) // verify deploy mode let deployMode = this.deployMode let master = this.master let masterUrl = this.masterUrl if (this.deployMode === 'local') { master = 'local' masterUrl = '' deployMode = 'client' } // get local params let locparams = '' this.localParams.forEach(v => { locparams = locparams + ' --variable ' + v.prop + '=' + v.value } ) // get waterdrop script let tureScript = '' this.resourceNameVal.resourceList.forEach(v => { tureScript = tureScript + this.baseScript + ' --master ' + master + masterUrl + ' --deploy-mode ' + deployMode + ' --queue ' + this.queue + ' --config ' + v.res + locparams + ' \n' }) // storage this.$emit('on-params', { resourceList: dataProcessing, localParams: this.localParams, rawScript: tureScript }) return true }, diGuiTree (item) { // Recursive convenience tree structure item.forEach(item => { item.children === '' || item.children === undefined || item.children === null || item.children.length === 0 ? 
this.operationTree(item) : this.diGuiTree(item.children) }) }, operationTree (item) { if (item.dirctory) { item.isDisabled = true } delete item.children }, searchTree (element, id) { // 根据id查找节点 if (element.id === id) { return element } else if (element.children !== null) { let i let result = null for (i = 0; result === null && i < element.children.length; i++) { result = this.searchTree(element.children[i], id) } return result } return null }, dataProcess (backResource) { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { this.resourceList.forEach(v => { this.options.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return item.id }) Array.prototype.diff = function (a) { return this.filter(function (i) { return a.indexOf(i) < 0 }) } let diffSet = this.resourceList.diff(resourceIdArr) let optionsCmp = [] if (diffSet.length > 0) { diffSet.forEach(item => { backResource.forEach(item1 => { if (item === item1.id || item === item1.res) { optionsCmp.push(item1) } }) }) } let noResources = [{ id: -1, name: $t('Unauthorized or deleted resources'), fullName: '/' + $t('Unauthorized or deleted resources'), children: [] }] if (optionsCmp.length > 0) { this.allNoResources = optionsCmp optionsCmp = optionsCmp.map(item => { return { id: item.id, name: item.name, fullName: item.res } }) optionsCmp.forEach(item => { item.isNew = true }) noResources[0].children = optionsCmp this.options = this.options.concat(noResources) } } } }, watch: { // Watch the cacheParams cacheParams (val) { this.resourceNameVal = val this.$emit('on-cache-params', val) }, resourceIdArr (arr) { let result = [] arr.forEach(item => { this.allNoResources.forEach(item1 => { if (item.id === item1.id) { // resultBool = true result.push(item1) } }) }) this.noRes = result }, master: { handler (code) { if (code === 'spark://') { this.masterUrlState = true } else if (code === 'mesos://') { 
this.masterUrlState = true } else { this.masterUrlState = false this.masterUrl = '' } } } }, computed: { resourceIdArr () { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { this.resourceList.forEach(v => { this.options.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return { id: item.id, name: item.name, res: item.fullName } }) } return resourceIdArr }, cacheParams () { return { resourceList: this.resourceIdArr, localParams: this.localParams, deployMode: this.deployMode, master: this.master, masterUrl: this.masterUrl, queue: this.queue } } }, created () { let item = this.store.state.dag.resourcesListS this.diGuiTree(item) this.options = item let o = this.backfillItem // Non-null objects represent backfill if (!_.isEmpty(o)) { this.master = o.params.master || 'yarn' this.deployMode = o.params.deployMode || 'client' this.masterUrl = o.params.masterUrl || '' this.queue = o.params.queue || 'default' this.rawScript = o.params.rawScript || '' // backfill resourceList let backResource = o.params.resourceList || [] let resourceList = o.params.resourceList || [] if (resourceList.length) { _.map(resourceList, v => { if (!v.id) { this.store.dispatch('dag/getResourceId', { type: 'FILE', fullName: '/' + v.res }).then(res => { this.resourceList.push(res.id) this.dataProcess(backResource) }).catch(e => { this.resourceList.push(v.res) this.dataProcess(backResource) }) } else { this.resourceList.push(v.id) this.dataProcess(backResource) } }) } // backfill localParams let localParams = o.params.localParams || [] if (localParams.length) { this.localParams = localParams } } }, mounted () { }, destroyed () { }, components: { mLocalParams, mListBox, Treeselect } } </script>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,769
[Improvement][UI] When we try to delete the existing dag, the console in web browser would shows exception.
version: latest dev ![image](https://user-images.githubusercontent.com/52202080/124861316-e5276080-dfe5-11eb-864b-4e438dcf2c64.png) ![image](https://user-images.githubusercontent.com/52202080/124861614-82829480-dfe6-11eb-9a00-b60cd86633af.png)
https://github.com/apache/dolphinscheduler/issues/5769
https://github.com/apache/dolphinscheduler/pull/5770
4a68bfbe1c816f9b1f2f43c1f01218689b57ed6f
8571461fe8c0eea9cdb9738be87e43f827f8e00f
"2021-07-08T05:09:36Z"
java
"2021-07-21T10:50:32Z"
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.js
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import Vue from 'vue' import _ from 'lodash' import i18n from '@/module/i18n' import { jsPlumb } from 'jsplumb' import JSP from './plugIn/jsPlumbHandle' import DownChart from './plugIn/downChart' import store from '@/conf/home/store' import dagre from 'dagre' /** * Prototype method */ const Dag = function () { this.dag = {} this.instance = {} } /** * init * @dag dag vue instance */ Dag.prototype.init = function ({ dag, instance }) { this.dag = dag this.instance = instance } /** * set init config */ Dag.prototype.setConfig = function (o) { JSP.setConfig(o) } /** * create dag */ Dag.prototype.create = function () { const self = this const plumbIns = jsPlumb.getInstance() plumbIns.reset() plumbIns.ready(() => { JSP.init({ dag: this.dag, instance: this.instance, options: { onRemoveNodes ($id) { self.dag.removeEventModelById($id) } } }) // init event JSP.handleEvent() // init draggable JSP.draggable() }) } /** * Action event on the right side of the toolbar */ Dag.prototype.toolbarEvent = function ({ item, code, is }) { const self = this switch (code) { case 'pointer': JSP.handleEventPointer(is) break case 'line': JSP.handleEventLine(is) break case 'remove': JSP.handleEventRemove() break case 'screen': 
JSP.handleEventScreen({ item, is }) break case 'download': Vue.prototype.$confirm(`${i18n.$t('Please confirm whether the workflow has been saved before downloading')}`, `${i18n.$t('Download')}`, { confirmButtonText: `${i18n.$t('Confirm')}`, cancelButtonText: `${i18n.$t('Cancel')}`, type: 'warning' }).then(() => { DownChart.download({ dagThis: self.dag }) }).catch(() => { }) break } } /** * Echo data display */ Dag.prototype.backfill = function (arg) { if (arg) { const marginX = 100 const g = new dagre.graphlib.Graph() g.setGraph({}) g.setDefaultEdgeLabel(function () { return {} }) for (const i in store.state.dag.locations) { const location = store.state.dag.locations[i] g.setNode(i, { label: i, width: Math.min(location.name.length * 7, 170), height: 150 }) } for (const i in store.state.dag.connects) { const connect = store.state.dag.connects[i] g.setEdge(connect.endPointSourceId, connect.endPointTargetId) } dagre.layout(g) const dataObject = {} g.nodes().forEach(function (v) { const node = g.node(v) const location = store.state.dag.locations[node.label] const obj = {} obj.name = location.name obj.x = node.x + marginX obj.y = node.y obj.targetarr = location.targetarr dataObject[node.label] = obj }) jsPlumb.ready(() => { JSP.init({ dag: this.dag, instance: this.instance, options: { onRemoveNodes ($id) { this.dag.removeEventModelById($id) } } }) // Backfill JSP.jspBackfill({ // connects connects: _.cloneDeep(store.state.dag.connects), // Node location information locations: _.cloneDeep(dataObject), // Node data largeJson: _.cloneDeep(store.state.dag.tasks) }) }) } else { const plumbIns = jsPlumb.getInstance() plumbIns.reset() plumbIns.ready(() => { JSP.init({ dag: this.dag, instance: this.instance, options: { onRemoveNodes ($id) { this.dag.removeEventModelById($id) } } }) // Backfill JSP.jspBackfill({ // connects connects: _.cloneDeep(store.state.dag.connects), // Node location information locations: _.cloneDeep(store.state.dag.locations), // Node data largeJson: 
_.cloneDeep(store.state.dag.tasks) }) }) } } /** * Get dag storage format data */ Dag.prototype.saveStore = function () { return JSP.saveStore() } export default new Dag()
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,808
[Bug][Server] When we try to transfer data using datax between different types of data sources, the worker will exit with ClassCastException
**Describe the bug** When we try to transfer data using datax between different types of data sources, the worker will exit with ClassCastException ![image](https://user-images.githubusercontent.com/52202080/125315605-e0f29e80-e369-11eb-8b9c-a94d8faa9eda.png) ```java [INFO] 2021-07-12 23:36:28.682 - [taskAppId=TASK-545173233664_3-3-3]:[464] - try to execute sql analysis query column name [ERROR] 2021-07-12 23:36:28.688 org.apache.dolphinscheduler.server.worker.runner.TaskExecuteThread:[171] - task scheduler failure java.lang.RuntimeException: java.lang.ClassCastException: org.apache.dolphinscheduler.common.datasource.clickhouse.ClickhouseConnectionParam cannot be cast to org.apache.dolphinscheduler.common.datasource.mysql.MysqlConnectionParam at org.apache.dolphinscheduler.common.datasource.DatasourceUtil.getConnection(DatasourceUtil.java:84) at org.apache.dolphinscheduler.server.worker.task.datax.DataxTask.tryExecuteSqlResolveColumnNames(DataxTask.java:557) at org.apache.dolphinscheduler.server.worker.task.datax.DataxTask.parsingSqlColumnNames(DataxTask.java:465) at org.apache.dolphinscheduler.server.worker.task.datax.DataxTask.buildDataxJobContentJson(DataxTask.java:286) at org.apache.dolphinscheduler.server.worker.task.datax.DataxTask.buildDataxJsonFile(DataxTask.java:215) at org.apache.dolphinscheduler.server.worker.task.datax.DataxTask.handle(DataxTask.java:166) at org.apache.dolphinscheduler.server.worker.runner.TaskExecuteThread.run(TaskExecuteThread.java:159) at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) at java.util.concurrent.FutureTask.run$$$capture(FutureTask.java:266) at java.util.concurrent.FutureTask.run(FutureTask.java) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: java.lang.ClassCastException: 
org.apache.dolphinscheduler.common.datasource.clickhouse.ClickhouseConnectionParam cannot be cast to org.apache.dolphinscheduler.common.datasource.mysql.MysqlConnectionParam at org.apache.dolphinscheduler.common.datasource.mysql.MysqlDatasourceProcessor.getConnection(MysqlDatasourceProcessor.java:113) at org.apache.dolphinscheduler.common.datasource.DatasourceUtil.getConnection(DatasourceUtil.java:82) ... 12 common frames omitted ``` **Which version of Dolphin Scheduler:** latest dev **Additional context** Add any other context about the problem here. **Requirement or improvement** - Please describe about your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/5808
https://github.com/apache/dolphinscheduler/pull/5809
4c0993cdf490eb0898fba42908270d10dff32001
bca92157a088c78d245ff60dfee2504ea8716c6a
"2021-07-12T15:48:53Z"
java
"2021-07-27T07:46:15Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTask.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.worker.task.datax; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam; import org.apache.dolphinscheduler.common.datasource.DatasourceUtil; import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.datax.DataxParameters; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.server.entity.DataxTaskExecutionContext; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.DataxUtils; import org.apache.dolphinscheduler.server.utils.ParamUtils; import 
org.apache.dolphinscheduler.server.worker.task.AbstractTask; import org.apache.dolphinscheduler.server.worker.task.CommandExecuteResult; import org.apache.dolphinscheduler.server.worker.task.ShellCommandExecutor; import org.apache.commons.io.FileUtils; import java.io.File; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; import java.nio.file.attribute.FileAttribute; import java.nio.file.attribute.PosixFilePermission; import java.nio.file.attribute.PosixFilePermissions; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.slf4j.Logger; import com.alibaba.druid.sql.ast.SQLStatement; import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr; import com.alibaba.druid.sql.ast.expr.SQLPropertyExpr; import com.alibaba.druid.sql.ast.statement.SQLSelect; import com.alibaba.druid.sql.ast.statement.SQLSelectItem; import com.alibaba.druid.sql.ast.statement.SQLSelectQueryBlock; import com.alibaba.druid.sql.ast.statement.SQLSelectStatement; import com.alibaba.druid.sql.ast.statement.SQLUnionQuery; import com.alibaba.druid.sql.parser.SQLStatementParser; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; /** * DataX task */ public class DataxTask extends AbstractTask { /** * jvm parameters */ public static final String JVM_PARAM = " --jvm=\"-Xms%sG -Xmx%sG\" "; /** * python process(datax only supports version 2.7 by default) */ private static final String DATAX_PYTHON = "python2.7"; private static final Pattern PYTHON_PATH_PATTERN = Pattern.compile("/bin/python[\\d.]*$"); /** * datax path */ private static final String DATAX_PATH = 
"${DATAX_HOME}/bin/datax.py"; /** * datax channel count */ private static final int DATAX_CHANNEL_COUNT = 1; /** * datax parameters */ private DataxParameters dataXParameters; /** * shell command executor */ private ShellCommandExecutor shellCommandExecutor; /** * taskExecutionContext */ private TaskExecutionContext taskExecutionContext; /** * constructor * * @param taskExecutionContext taskExecutionContext * @param logger logger */ public DataxTask(TaskExecutionContext taskExecutionContext, Logger logger) { super(taskExecutionContext, logger); this.taskExecutionContext = taskExecutionContext; this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, taskExecutionContext, logger); } /** * init DataX config */ @Override public void init() { logger.info("datax task params {}", taskExecutionContext.getTaskParams()); dataXParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), DataxParameters.class); if (!dataXParameters.checkParameters()) { throw new RuntimeException("datax task params is not valid"); } } /** * run DataX process * * @throws Exception if error throws Exception */ @Override public void handle() throws Exception { try { // set the name of the current thread String threadLoggerInfoName = String.format("TaskLogInfo-%s", taskExecutionContext.getTaskAppId()); Thread.currentThread().setName(threadLoggerInfoName); // replace placeholder,and combine local and global parameters Map<String, Property> paramsMap = ParamUtils.convert(taskExecutionContext,getParameters()); // run datax procesDataSourceService.s String jsonFilePath = buildDataxJsonFile(paramsMap); String shellCommandFilePath = buildShellCommandFile(jsonFilePath, paramsMap); CommandExecuteResult commandExecuteResult = shellCommandExecutor.run(shellCommandFilePath); setExitStatusCode(commandExecuteResult.getExitStatusCode()); setAppIds(commandExecuteResult.getAppIds()); setProcessId(commandExecuteResult.getProcessId()); } catch (Exception e) { 
setExitStatusCode(Constants.EXIT_CODE_FAILURE); throw e; } } /** * cancel DataX process * * @param cancelApplication cancelApplication * @throws Exception if error throws Exception */ @Override public void cancelApplication(boolean cancelApplication) throws Exception { // cancel process shellCommandExecutor.cancelApplication(); } /** * build datax configuration file * * @return datax json file name * @throws Exception if error throws Exception */ private String buildDataxJsonFile(Map<String, Property> paramsMap) throws Exception { // generate json String fileName = String.format("%s/%s_job.json", taskExecutionContext.getExecutePath(), taskExecutionContext.getTaskAppId()); String json; Path path = new File(fileName).toPath(); if (Files.exists(path)) { return fileName; } if (dataXParameters.getCustomConfig() == Flag.YES.ordinal()) { json = dataXParameters.getJson().replaceAll("\\r\\n", "\n"); } else { ObjectNode job = JSONUtils.createObjectNode(); job.putArray("content").addAll(buildDataxJobContentJson()); job.set("setting", buildDataxJobSettingJson()); ObjectNode root = JSONUtils.createObjectNode(); root.set("job", job); root.set("core", buildDataxCoreJson()); json = root.toString(); } // replace placeholder json = ParameterUtils.convertParameterPlaceholders(json, ParamUtils.convert(paramsMap)); logger.debug("datax job json : {}", json); // create datax json file FileUtils.writeStringToFile(new File(fileName), json, StandardCharsets.UTF_8); return fileName; } /** * build datax job config * * @return collection of datax job config JSONObject * @throws SQLException if error throws SQLException */ private List<ObjectNode> buildDataxJobContentJson() { DataxTaskExecutionContext dataxTaskExecutionContext = taskExecutionContext.getDataxTaskExecutionContext(); BaseConnectionParam dataSourceCfg = (BaseConnectionParam) DatasourceUtil.buildConnectionParams( DbType.of(dataxTaskExecutionContext.getSourcetype()), dataxTaskExecutionContext.getSourceConnectionParams()); 
BaseConnectionParam dataTargetCfg = (BaseConnectionParam) DatasourceUtil.buildConnectionParams( DbType.of(dataxTaskExecutionContext.getTargetType()), dataxTaskExecutionContext.getTargetConnectionParams()); List<ObjectNode> readerConnArr = new ArrayList<>(); ObjectNode readerConn = JSONUtils.createObjectNode(); ArrayNode sqlArr = readerConn.putArray("querySql"); for (String sql : new String[]{dataXParameters.getSql()}) { sqlArr.add(sql); } ArrayNode urlArr = readerConn.putArray("jdbcUrl"); urlArr.add(DatasourceUtil.getJdbcUrl(DbType.valueOf(dataXParameters.getDtType()), dataSourceCfg)); readerConnArr.add(readerConn); ObjectNode readerParam = JSONUtils.createObjectNode(); readerParam.put("username", dataSourceCfg.getUser()); readerParam.put("password", CommonUtils.decodePassword(dataSourceCfg.getPassword())); readerParam.putArray("connection").addAll(readerConnArr); ObjectNode reader = JSONUtils.createObjectNode(); reader.put("name", DataxUtils.getReaderPluginName(DbType.of(dataxTaskExecutionContext.getSourcetype()))); reader.set("parameter", readerParam); List<ObjectNode> writerConnArr = new ArrayList<>(); ObjectNode writerConn = JSONUtils.createObjectNode(); ArrayNode tableArr = writerConn.putArray("table"); tableArr.add(dataXParameters.getTargetTable()); writerConn.put("jdbcUrl", DatasourceUtil.getJdbcUrl(DbType.valueOf(dataXParameters.getDsType()), dataTargetCfg)); writerConnArr.add(writerConn); ObjectNode writerParam = JSONUtils.createObjectNode(); writerParam.put("username", dataTargetCfg.getUser()); writerParam.put("password", CommonUtils.decodePassword(dataTargetCfg.getPassword())); String[] columns = parsingSqlColumnNames(DbType.of(dataxTaskExecutionContext.getSourcetype()), DbType.of(dataxTaskExecutionContext.getTargetType()), dataSourceCfg, dataXParameters.getSql()); ArrayNode columnArr = writerParam.putArray("column"); for (String column : columns) { columnArr.add(column); } writerParam.putArray("connection").addAll(writerConnArr); if 
(CollectionUtils.isNotEmpty(dataXParameters.getPreStatements())) { ArrayNode preSqlArr = writerParam.putArray("preSql"); for (String preSql : dataXParameters.getPreStatements()) { preSqlArr.add(preSql); } } if (CollectionUtils.isNotEmpty(dataXParameters.getPostStatements())) { ArrayNode postSqlArr = writerParam.putArray("postSql"); for (String postSql : dataXParameters.getPostStatements()) { postSqlArr.add(postSql); } } ObjectNode writer = JSONUtils.createObjectNode(); writer.put("name", DataxUtils.getWriterPluginName(DbType.of(dataxTaskExecutionContext.getTargetType()))); writer.set("parameter", writerParam); List<ObjectNode> contentList = new ArrayList<>(); ObjectNode content = JSONUtils.createObjectNode(); content.set("reader", reader); content.set("writer", writer); contentList.add(content); return contentList; } /** * build datax setting config * * @return datax setting config JSONObject */ private ObjectNode buildDataxJobSettingJson() { ObjectNode speed = JSONUtils.createObjectNode(); speed.put("channel", DATAX_CHANNEL_COUNT); if (dataXParameters.getJobSpeedByte() > 0) { speed.put("byte", dataXParameters.getJobSpeedByte()); } if (dataXParameters.getJobSpeedRecord() > 0) { speed.put("record", dataXParameters.getJobSpeedRecord()); } ObjectNode errorLimit = JSONUtils.createObjectNode(); errorLimit.put("record", 0); errorLimit.put("percentage", 0); ObjectNode setting = JSONUtils.createObjectNode(); setting.set("speed", speed); setting.set("errorLimit", errorLimit); return setting; } private ObjectNode buildDataxCoreJson() { ObjectNode speed = JSONUtils.createObjectNode(); speed.put("channel", DATAX_CHANNEL_COUNT); if (dataXParameters.getJobSpeedByte() > 0) { speed.put("byte", dataXParameters.getJobSpeedByte()); } if (dataXParameters.getJobSpeedRecord() > 0) { speed.put("record", dataXParameters.getJobSpeedRecord()); } ObjectNode channel = JSONUtils.createObjectNode(); channel.set("speed", speed); ObjectNode transport = JSONUtils.createObjectNode(); 
transport.set("channel", channel); ObjectNode core = JSONUtils.createObjectNode(); core.set("transport", transport); return core; } /** * create command * * @return shell command file name * @throws Exception if error throws Exception */ private String buildShellCommandFile(String jobConfigFilePath, Map<String, Property> paramsMap) throws Exception { // generate scripts String fileName = String.format("%s/%s_node.%s", taskExecutionContext.getExecutePath(), taskExecutionContext.getTaskAppId(), OSUtils.isWindows() ? "bat" : "sh"); Path path = new File(fileName).toPath(); if (Files.exists(path)) { return fileName; } // datax python command StringBuilder sbr = new StringBuilder(); sbr.append(getPythonCommand()); sbr.append(" "); sbr.append(DATAX_PATH); sbr.append(" "); sbr.append(loadJvmEnv(dataXParameters)); sbr.append(jobConfigFilePath); // replace placeholder String dataxCommand = ParameterUtils.convertParameterPlaceholders(sbr.toString(), ParamUtils.convert(paramsMap)); logger.debug("raw script : {}", dataxCommand); // create shell command file Set<PosixFilePermission> perms = PosixFilePermissions.fromString(Constants.RWXR_XR_X); FileAttribute<Set<PosixFilePermission>> attr = PosixFilePermissions.asFileAttribute(perms); if (OSUtils.isWindows()) { Files.createFile(path); } else { Files.createFile(path, attr); } Files.write(path, dataxCommand.getBytes(), StandardOpenOption.APPEND); return fileName; } public String getPythonCommand() { String pythonHome = System.getenv("PYTHON_HOME"); return getPythonCommand(pythonHome); } public String getPythonCommand(String pythonHome) { if (StringUtils.isEmpty(pythonHome)) { return DATAX_PYTHON; } String pythonBinPath = "/bin/" + DATAX_PYTHON; Matcher matcher = PYTHON_PATH_PATTERN.matcher(pythonHome); if (matcher.find()) { return matcher.replaceAll(pythonBinPath); } return Paths.get(pythonHome, pythonBinPath).toString(); } public String loadJvmEnv(DataxParameters dataXParameters) { int xms = dataXParameters.getXms() < 1 ? 
1 : dataXParameters.getXms(); int xmx = dataXParameters.getXmx() < 1 ? 1 : dataXParameters.getXmx(); return String.format(JVM_PARAM, xms, xmx); } /** * parsing synchronized column names in SQL statements * * @param dsType the database type of the data source * @param dtType the database type of the data target * @param dataSourceCfg the database connection parameters of the data source * @param sql sql for data synchronization * @return Keyword converted column names */ private String[] parsingSqlColumnNames(DbType dsType, DbType dtType, BaseConnectionParam dataSourceCfg, String sql) { String[] columnNames = tryGrammaticalAnalysisSqlColumnNames(dsType, sql); if (columnNames == null || columnNames.length == 0) { logger.info("try to execute sql analysis query column name"); columnNames = tryExecuteSqlResolveColumnNames(dataSourceCfg, sql); } notNull(columnNames, String.format("parsing sql columns failed : %s", sql)); return DataxUtils.convertKeywordsColumns(dtType, columnNames); } /** * try grammatical parsing column * * @param dbType database type * @param sql sql for data synchronization * @return column name array * @throws RuntimeException if error throws RuntimeException */ private String[] tryGrammaticalAnalysisSqlColumnNames(DbType dbType, String sql) { String[] columnNames; try { SQLStatementParser parser = DataxUtils.getSqlStatementParser(dbType, sql); if (parser == null) { logger.warn("database driver [{}] is not support grammatical analysis sql", dbType); return new String[0]; } SQLStatement sqlStatement = parser.parseStatement(); SQLSelectStatement sqlSelectStatement = (SQLSelectStatement) sqlStatement; SQLSelect sqlSelect = sqlSelectStatement.getSelect(); List<SQLSelectItem> selectItemList = null; if (sqlSelect.getQuery() instanceof SQLSelectQueryBlock) { SQLSelectQueryBlock block = (SQLSelectQueryBlock) sqlSelect.getQuery(); selectItemList = block.getSelectList(); } else if (sqlSelect.getQuery() instanceof SQLUnionQuery) { SQLUnionQuery unionQuery = 
(SQLUnionQuery) sqlSelect.getQuery(); SQLSelectQueryBlock block = (SQLSelectQueryBlock) unionQuery.getRight(); selectItemList = block.getSelectList(); } notNull(selectItemList, String.format("select query type [%s] is not support", sqlSelect.getQuery().toString())); columnNames = new String[selectItemList.size()]; for (int i = 0; i < selectItemList.size(); i++) { SQLSelectItem item = selectItemList.get(i); String columnName = null; if (item.getAlias() != null) { columnName = item.getAlias(); } else if (item.getExpr() != null) { if (item.getExpr() instanceof SQLPropertyExpr) { SQLPropertyExpr expr = (SQLPropertyExpr) item.getExpr(); columnName = expr.getName(); } else if (item.getExpr() instanceof SQLIdentifierExpr) { SQLIdentifierExpr expr = (SQLIdentifierExpr) item.getExpr(); columnName = expr.getName(); } } else { throw new RuntimeException( String.format("grammatical analysis sql column [ %s ] failed", item.toString())); } if (columnName == null) { throw new RuntimeException( String.format("grammatical analysis sql column [ %s ] failed", item.toString())); } columnNames[i] = columnName; } } catch (Exception e) { logger.warn(e.getMessage(), e); return new String[0]; } return columnNames; } /** * try to execute sql to resolve column names * * @param baseDataSource the database connection parameters * @param sql sql for data synchronization * @return column name array */ public String[] tryExecuteSqlResolveColumnNames(BaseConnectionParam baseDataSource, String sql) { String[] columnNames; sql = String.format("SELECT t.* FROM ( %s ) t WHERE 0 = 1", sql); sql = sql.replace(";", ""); try ( Connection connection = DatasourceUtil.getConnection(DbType.valueOf(dataXParameters.getDtType()), baseDataSource); PreparedStatement stmt = connection.prepareStatement(sql); ResultSet resultSet = stmt.executeQuery()) { ResultSetMetaData md = resultSet.getMetaData(); int num = md.getColumnCount(); columnNames = new String[num]; for (int i = 1; i <= num; i++) { columnNames[i - 1] = 
md.getColumnName(i); } } catch (SQLException e) { logger.warn(e.getMessage(), e); return null; } return columnNames; } @Override public AbstractParameters getParameters() { return dataXParameters; } private void notNull(Object obj, String message) { if (obj == null) { throw new RuntimeException(message); } } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,830
[Improvement][Web] Improve the selection style in dag edit dialog
*For better global communication, please give priority to using English description, thx! * **Describe the question** When we want to select all or part of the text in the input box, because the selected background color is too similar to the original background color, users often think that the text cannot be selected ![db6deb90d479ae54c0a0f89a0c8a1c838bc149d4](https://user-images.githubusercontent.com/52202080/125908608-abec7fec-2c45-47f4-a8ba-a23a4a60abbf.gif) **Which version of DolphinScheduler:** latest dev branch **Describe alternatives you've considered** A clear and concise description of any alternative improvement solutions you've considered.
https://github.com/apache/dolphinscheduler/issues/5830
https://github.com/apache/dolphinscheduler/pull/5829
bca92157a088c78d245ff60dfee2504ea8716c6a
8db1b042b2523bd3b6ed363303283308ea14b2cc
"2021-07-16T07:23:41Z"
java
"2021-07-27T15:54:07Z"
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <div class="clearfix dag-model" > <div class="toolbar"> <div class="title"><span>{{$t('Toolbar')}}</span></div> <div class="toolbar-btn"> <div class="bar-box roundedRect jtk-draggable jtk-droppable jtk-endpoint-anchor jtk-connected" :class="v === dagBarId ? 
'active' : ''" :id="v" :key="v" v-for="(item,v) in tasksTypeList" @mousedown="_getDagId(v)"> <div data-toggle="tooltip" :title="item.desc"> <div class="icos" :class="'icos-' + v" ></div> </div> </div> </div> </div> <div class="dag-contect"> <div class="dag-toolbar"> <div class="assist-btn"> <el-tooltip :content="$t('View variables')" placement="top" :enterable="false"> <span> <el-button style="vertical-align: middle;" type="primary" size="mini" :disabled="$route.name !== 'projects-instance-details'" @click="_toggleView" icon="el-icon-c-scale-to-original"> </el-button> </span> </el-tooltip> <el-tooltip :content="$t('Startup parameter')" placement="top" :enterable="false"> <span> <el-button style="vertical-align: middle;" type="primary" size="mini" :disabled="$route.name !== 'projects-instance-details'" @click="_toggleParam" icon="el-icon-arrow-right"> </el-button> </span> </el-tooltip> <span class="name">{{name}}</span> &nbsp; <span v-if="name" class="copy-name" @click="_copyName" :data-clipboard-text="name"><em class="el-icon-copy-document" data-container="body" data-toggle="tooltip" :title="$t('Copy name')" ></em></span> </div> <div class="save-btn"> <div class="operation" style="vertical-align: middle;"> <a href="javascript:" v-for="(item,$index) in toolOperList" :class="_operationClass(item)" :id="item.code" :key="$index" @click="_ckOperation(item,$event)"> <el-tooltip :content="item.desc" placement="top" :enterable="false"> <span><el-button type="text" class="operBtn" :icon="item.icon"></el-button></span> </el-tooltip> </a> </div> <el-tooltip :content="$t('Format DAG')" placement="top" :enterable="false"> <span> <el-button type="primary" icon="el-icon-caret-right" size="mini" v-if="(type === 'instance' || 'definition') && urlParam.id !=undefined" style="vertical-align: middle;" @click="dagAutomaticLayout"> </el-button> </span> </el-tooltip> <el-tooltip :content="$t('Refresh DAG status')" placement="top" :enterable="false"> <span> <el-button 
style="vertical-align: middle;" icon="el-icon-refresh" type="primary" :loading="isRefresh" v-if="type === 'instance'" @click="!isRefresh && _refresh()" size="mini" > </el-button> </span> </el-tooltip> <el-button v-if="isRtTasks" style="vertical-align: middle;" type="primary" size="mini" icon="el-icon-back" @click="_rtNodesDag" > {{$t('Return_1')}} </el-button> <span> <el-button type="primary" icon="el-icon-switch-button" size="mini" v-if="(type === 'instance' || 'definition') " style="vertical-align: middle;" @click="_closeDAG"> {{$t('Close')}} </el-button> </span> <el-button style="vertical-align: middle;" type="primary" size="mini" :loading="spinnerLoading" @click="_saveChart" icon="el-icon-document-checked" > {{spinnerLoading ? 'Loading...' : $t('Save')}} </el-button> <span> <el-button style="vertical-align: middle;" type="primary" size="mini" :loading="spinnerLoading" @click="_version" :disabled="$route.params.id == null" icon="el-icon-info"> {{spinnerLoading ? 'Loading...' : $t('Version Info')}} </el-button> </span> </div> </div> <div class="scrollbar dag-container"> <div class="jtk-demo" id="jtk-demo"> <div class="jtk-demo-canvas canvas-wide statemachine-demo jtk-surface jtk-surface-nopan jtk-draggable" id="canvas" ></div> </div> </div> <el-drawer :visible.sync="drawer" size="" :with-header="false"> <m-versions :versionData = versionData :isInstance="type === 'instance'" @mVersionSwitchProcessDefinitionVersion="mVersionSwitchProcessDefinitionVersion" @mVersionGetProcessDefinitionVersionsPage="mVersionGetProcessDefinitionVersionsPage" @mVersionDeleteProcessDefinitionVersion="mVersionDeleteProcessDefinitionVersion" @closeVersion="closeVersion"></m-versions> </el-drawer> <el-drawer :visible.sync="nodeDrawer" size="" :with-header="false"> <m-form-model v-if="nodeDrawer" :nodeData=nodeData @seeHistory="seeHistory" @addTaskInfo="addTaskInfo" @cacheTaskInfo="cacheTaskInfo" @close="close" @onSubProcess="onSubProcess"></m-form-model> </el-drawer> <el-drawer 
:visible.sync="lineDrawer" size="" :wrapperClosable="false" :with-header="false"> <m-form-line-model :lineData = lineData @addLineInfo="addLineInfo" @cancel="cancel"></m-form-line-model> </el-drawer> <el-drawer :visible.sync="udpDrawer" size="" :wrapperClosable="false" :with-header="false"> <m-udp></m-udp> </el-drawer> <el-dialog :title="$t('Set the DAG diagram name')" :visible.sync="dialogVisible" width="auto"> <m-udp ref="mUdp" @onUdp="onUdpDialog" @close="closeDialog"></m-udp> </el-dialog> <el-dialog :title="$t('Please set the parameters before starting')" :visible.sync="startDialog" width="auto"> <m-start :startData= "startData" :startNodeList="startNodeList" :sourceType="sourceType" @onUpdateStart="onUpdateStart" @closeStart="closeStart"></m-start> </el-dialog> </div> </div> </template> <script> import _ from 'lodash' import Dag from './dag' import mUdp from './udp/udp' import i18n from '@/module/i18n' import { jsPlumb } from 'jsplumb' import Clipboard from 'clipboard' import { allNodesId } from './plugIn/util' import { toolOper, tasksType } from './config' import mFormModel from './formModel/formModel' import mFormLineModel from './formModel/formLineModel' import { formatDate } from '@/module/filter/filter' import { findComponentDownward } from '@/module/util/' import disabledState from '@/module/mixin/disabledState' import { mapActions, mapState, mapMutations } from 'vuex' import mStart from '../../projects/pages/definition/pages/list/_source/start' import mVersions from '../../projects/pages/definition/pages/list/_source/versions' let eventModel export default { name: 'dag-chart', data () { return { tasksTypeList: tasksType, toolOperList: toolOper(this), dagBarId: null, toolOperCode: '', spinnerLoading: false, urlParam: { id: this.$route.params.id || null }, isRtTasks: false, isRefresh: false, isLoading: false, taskId: null, arg: false, versionData: { processDefinition: { id: null, version: '', releaseState: '' }, processDefinitionVersions: [], total: null, 
pageNo: null, pageSize: null }, drawer: false, nodeData: { id: null, taskType: '', self: {}, preNode: [], rearList: [], instanceId: null }, nodeDrawer: false, lineData: { id: null, sourceId: '', targetId: '' }, lineDrawer: false, udpDrawer: false, dialogVisible: false, startDialog: false, startData: {}, startNodeList: '', sourceType: '' } }, mixins: [disabledState], props: { type: String, releaseState: String }, methods: { ...mapActions('dag', ['saveDAGchart', 'updateInstance', 'updateDefinition', 'getTaskState', 'switchProcessDefinitionVersion', 'getProcessDefinitionVersionsPage', 'deleteProcessDefinitionVersion']), ...mapMutations('dag', ['addTasks', 'cacheTasks', 'resetParams', 'setIsEditDag', 'setName', 'addConnects', 'resetLocalParam']), startRunning (item, startNodeList, sourceType) { this.startData = item this.startNodeList = startNodeList this.sourceType = sourceType this.startDialog = true }, onUpdateStart () { this.startDialog = false }, closeStart () { this.startDialog = false }, // DAG automatic layout dagAutomaticLayout () { if (this.store.state.dag.isEditDag) { this.$message.warning(`${i18n.$t('Please save the DAG before formatting')}`) return false } $('#canvas').html('') // Destroy round robin Dag.init({ dag: this, instance: jsPlumb.getInstance({ Endpoint: [ 'Dot', { radius: 1, cssClass: 'dot-style' } ], Connector: 'Bezier', PaintStyle: { lineWidth: 2, stroke: '#456' }, // Connection style ConnectionOverlays: [ [ 'Arrow', { location: 1, id: 'arrow', length: 12, foldback: 0.8 } ], ['Label', { location: 0.5, id: 'label' }] ], Container: 'canvas', ConnectionsDetachable: true }) }) if (this.tasks.length) { Dag.backfill(true) if (this.type === 'instance') { this._getTaskState(false).then(res => {}) } } else { Dag.create() } }, init (args) { if (this.tasks.length) { Dag.backfill(args) // Process instances can view status if (this.type === 'instance') { this._getTaskState(false).then(res => {}) // Round robin acquisition status this.setIntervalP = 
setInterval(() => { this._getTaskState(true).then(res => {}) }, 90000) } } else { Dag.create() } }, /** * copy name */ _copyName () { let clipboard = new Clipboard('.copy-name') clipboard.on('success', e => { this.$message.success(`${i18n.$t('Copy success')}`) // Free memory clipboard.destroy() }) clipboard.on('error', e => { // Copy is not supported this.$message.warning(`${i18n.$t('The browser does not support automatic copying')}`) // Free memory clipboard.destroy() }) }, /** * Get state interface * @param isReset Whether to manually refresh */ _getTaskState (isReset) { return new Promise((resolve, reject) => { this.getTaskState(this.urlParam.id).then(res => { let data = res.list let state = res.processInstanceState let taskList = res.taskList let idArr = allNodesId() const titleTpl = (item, desc) => { let $item = _.filter(taskList, v => v.name === item.name)[0] return `<div style="text-align: left">${i18n.$t('Name')}:${$item.name}</br>${i18n.$t('State')}:${desc}</br>${i18n.$t('type')}:${$item.taskType}</br>${i18n.$t('host')}:${$item.host || '-'}</br>${i18n.$t('Retry Count')}:${$item.retryTimes}</br>${i18n.$t('Submit Time')}:${formatDate($item.submitTime)}</br>${i18n.$t('Start Time')}:${formatDate($item.startTime)}</br>${i18n.$t('End Time')}:${$item.endTime ? formatDate($item.endTime) : '-'}</br></div>` } // remove tip state dom $('.w').find('.state-p').html('') const newTask = [] data.forEach(v1 => { idArr.forEach(v2 => { if (v2.name === v1.name) { let dom = $(`#${v2.id}`) let state = dom.find('.state-p') let depState = '' taskList.forEach(item => { if (item.name === v1.name) { depState = item.state const params = item.taskParams ? JSON.parse(item.taskParams) : '' let localParam = params.localParams || [] newTask.push({ id: v2.id, localParam }) } }) dom.attr('data-state-id', v1.stateId) dom.attr('data-dependent-result', v1.dependentResult || '') dom.attr('data-dependent-depState', depState) state.append(`<strong class="${v1.icoUnicode} ${v1.isSpin ? 
'as as-spin' : ''}" style="color:${v1.color}" data-toggle="tooltip" data-html="true" data-container="body"></strong>`) state.find('strong').attr('title', titleTpl(v2, v1.desc)) } }) }) if (state === 'PAUSE' || state === 'STOP' || state === 'FAILURE' || this.state === 'SUCCESS') { // Manual refresh does not regain large json if (isReset) { findComponentDownward(this.$root, `${this.type}-details`)._reset() } } if (!isReset) { this.resetLocalParam(newTask) } resolve() }) }) }, /** * Get the action bar id * @param item */ _getDagId (v) { // if (this.isDetails) { // return // } this.dagBarId = v }, /** * operating */ _ckOperation (item) { let is = true let code = '' if (item.disable) { return } if (this.toolOperCode === item.code) { this.toolOperCode = '' code = item.code is = false } else { this.toolOperCode = item.code code = this.toolOperCode is = true } // event type Dag.toolbarEvent({ item: item, code: code, is: is }) }, _operationClass (item) { return this.toolOperCode === item.code ? 'active' : '' // if (item.disable) { // return this.toolOperCode === item.code ? 'active' : '' // } else { // return 'disable' // } }, /** * Storage interface */ _save (sourceType) { return new Promise((resolve, reject) => { this.spinnerLoading = true // Storage store Dag.saveStore().then(res => { if (this._verifConditions(res.tasks)) { if (this.urlParam.id) { /** * Edit * @param saveInstanceEditDAGChart => Process instance editing * @param saveEditDAGChart => Process definition editing */ this[this.type === 'instance' ? 
'updateInstance' : 'updateDefinition'](this.urlParam.id).then(res => { // this.$message.success(res.msg) this.$message({ message: res.msg, type: 'success', offset: 80 }) this.spinnerLoading = false // Jump process definition if (this.type === 'instance') { this.$router.push({ path: `/projects/${this.projectId}/instance/list/${this.urlParam.id}` }) } else { this.$router.push({ path: `/projects/${this.projectId}/definition/list/${this.urlParam.id}` }) } resolve() }).catch(e => { this.$message.error(e.msg || '') this.spinnerLoading = false reject(e) }) } else { // New this.saveDAGchart().then(res => { this.$message.success(res.msg) this.spinnerLoading = false // source @/conf/home/pages/dag/_source/editAffirmModel/index.js if (sourceType !== 'affirm') { // Jump process definition this.$router.push({ name: 'projects-definition-list' }) } resolve() }).catch(e => { this.$message.error(e.msg || '') this.setName('') this.spinnerLoading = false reject(e) }) } } }) }) }, _closeDAG () { let $name = this.$route.name if ($name && $name.indexOf('definition') !== -1) { this.$router.push({ name: 'projects-definition-list' }) } else { this.$router.push({ name: 'projects-instance-list' }) } }, _verifConditions (value) { let tasks = value let bool = true tasks.map(v => { if (v.type === 'CONDITIONS' && (v.conditionResult.successNode[0] === '' || v.conditionResult.successNode[0] === null || v.conditionResult.failedNode[0] === '' || v.conditionResult.failedNode[0] === null)) { bool = false return false } }) if (!bool) { this.$message.warning(`${i18n.$t('Successful branch flow and failed branch flow are required')}`) this.spinnerLoading = false return false } return true }, onUdpDialog () { this._save() this.dialogVisible = false }, closeDialog () { this.dialogVisible = false }, /** * Save chart */ _saveChart () { // Verify node if (!this.tasks.length) { this.$message.warning(`${i18n.$t('Failed to create node to save')}`) return } this.dialogVisible = true this.$nextTick(() => { 
this.$refs.mUdp.reloadParam() }) }, /** * Return to the previous child node */ _rtNodesDag () { let getIds = this.$route.query.subProcessIds let idsArr = getIds.split(',') let ids = idsArr.slice(0, idsArr.length - 1) let id = idsArr[idsArr.length - 1] let query = {} if (id !== idsArr[0]) { query = { subProcessIds: ids.join(',') } } let $name = this.$route.name.split('-') this.$router.push({ path: `/${$name[0]}/${$name[1]}/list/${id}`, query: query }) }, /** * Subprocess processing * @param subProcessId Subprocess ID */ _subProcessHandle (subProcessId) { let subProcessIds = [] let getIds = this.$route.query.subProcessIds if (getIds) { let newId = getIds.split(',') newId.push(this.urlParam.id) subProcessIds = newId } else { subProcessIds.push(this.urlParam.id) } let $name = this.$route.name.split('-') this.$router.push({ path: `/${$name[0]}/${$name[1]}/list/${subProcessId}`, query: { subProcessIds: subProcessIds.join(',') } }) }, /** * Refresh data */ _refresh () { this.isRefresh = true this._getTaskState(false).then(res => { setTimeout(() => { this.isRefresh = false this.$message.success(`${i18n.$t('Refresh status succeeded')}`) }, 2200) }) }, /** * View variables */ _toggleView () { findComponentDownward(this.$root, 'assist-dag-index')._toggleView() }, /** * Starting parameters */ _toggleParam () { findComponentDownward(this.$root, 'starting-params-dag-index')._toggleParam() }, addLineInfo ({ item, fromThis }) { this.addConnects(item) this.lineDrawer = false }, cancel ({ fromThis }) { this.lineDrawer = false }, /** * Create a node popup layer * @param Object id */ _createLineLabel ({ id, sourceId, targetId }) { this.lineData.id = id this.lineData.sourceId = sourceId this.lineData.targetId = targetId this.lineDrawer = true }, seeHistory (taskName) { this.nodeData.self.$router.push({ name: 'task-instance', query: { processInstanceId: this.nodeData.self.$route.params.id, taskName: taskName } }) }, addTaskInfo ({ item, fromThis }) { this.addTasks(item) this.nodeDrawer 
= false }, cacheTaskInfo ({ item, fromThis }) { this.cacheTasks(item) }, close ({ item, flag, fromThis }) { this.addTasks(item) // Edit status does not allow deletion of nodes if (flag) { jsPlumb.remove(this.nodeData.id) } this.nodeDrawer = false }, onSubProcess ({ subProcessId, fromThis }) { this._subProcessHandle(subProcessId) }, _createNodes ({ id, type }) { let self = this let preNode = [] let rearNode = [] let rearList = [] $('div[data-targetarr*="' + id + '"]').each(function () { rearNode.push($(this).attr('id')) }) if (rearNode.length > 0) { rearNode.forEach(v => { let rearobj = {} rearobj.value = $(`#${v}`).find('.name-p').text() rearobj.label = $(`#${v}`).find('.name-p').text() rearList.push(rearobj) }) } else { rearList = [] } let targetarr = $(`#${id}`).attr('data-targetarr') if (targetarr) { let nodearr = targetarr.split(',') nodearr.forEach(v => { let nodeobj = {} nodeobj.value = $(`#${v}`).find('.name-p').text() nodeobj.label = $(`#${v}`).find('.name-p').text() preNode.push(nodeobj) }) } else { preNode = [] } this.taskId = id type = type || self.dagBarId this.nodeData.id = id this.nodeData.taskType = type this.nodeData.self = self this.nodeData.preNode = preNode this.nodeData.rearList = rearList this.nodeData.instanceId = this.$route.params.id this.nodeDrawer = true }, removeEventModelById ($id) { if (eventModel && this.taskId === $id) { eventModel.remove() } }, /** * switch version in process definition version list * * @param version the version user want to change * @param processDefinitionId the process definition id * @param fromThis fromThis */ mVersionSwitchProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) { this.$store.state.dag.isSwitchVersion = true this.switchProcessDefinitionVersion({ version: version, processDefinitionId: processDefinitionId }).then(res => { this.$message.success($t('Switch Version Successfully')) this.$router.push({ path: `/projects/${this.projectId}/definition/list/${processDefinitionId}` }) 
}).catch(e => { this.$store.state.dag.isSwitchVersion = false this.$message.error(e.msg || '') }) }, /** * Paging event of process definition versions * * @param pageNo page number * @param pageSize page size * @param processDefinitionId the process definition id of page version * @param fromThis fromThis */ mVersionGetProcessDefinitionVersionsPage ({ pageNo, pageSize, processDefinitionCode, fromThis }) { this.getProcessDefinitionVersionsPage({ pageNo: pageNo, pageSize: pageSize, processDefinitionCode: processDefinitionCode }).then(res => { this.versionData.processDefinitionVersions = res.data.lists this.versionData.total = res.data.totalCount this.versionData.pageSize = res.data.pageSize this.versionData.pageNo = res.data.currentPage }).catch(e => { this.$message.error(e.msg || '') }) }, /** * delete one version of process definition * * @param version the version need to delete * @param processDefinitionId the process definition id user want to delete * @param fromThis fromThis */ mVersionDeleteProcessDefinitionVersion ({ version, processDefinitionId, processDefinitionCode, fromThis }) { this.deleteProcessDefinitionVersion({ version: version, processDefinitionId: processDefinitionId }).then(res => { this.$message.success(res.msg || '') this.mVersionGetProcessDefinitionVersionsPage({ pageNo: 1, pageSize: 10, processDefinitionCode: processDefinitionCode, fromThis: fromThis }) }).catch(e => { this.$message.error(e.msg || '') }) }, /** * query the process definition pagination version */ _version (item) { this.getProcessDefinitionVersionsPage({ pageNo: 1, pageSize: 10, processDefinitionCode: this.store.state.dag.code }).then(res => { let processDefinitionVersions = res.data.lists let total = res.data.totalCount let pageSize = res.data.pageSize let pageNo = res.data.currentPage this.versionData.processDefinition.id = this.urlParam.id this.versionData.processDefinition.code = this.store.state.dag.code this.versionData.processDefinition.version = 
this.$store.state.dag.version this.versionData.processDefinition.releaseState = this.releaseState this.versionData.processDefinitionVersions = processDefinitionVersions this.versionData.total = total this.versionData.pageNo = pageNo this.versionData.pageSize = pageSize this.drawer = true }).catch(e => { this.$message.error(e.msg || '') }) }, closeVersion () { this.drawer = false } }, watch: { tasks: { deep: true, handler (o) { // Edit state does not allow deletion of node a... this.setIsEditDag(true) } } }, created () { // Edit state does not allow deletion of node a... this.setIsEditDag(false) if (this.$route.query.subProcessIds) { this.isRtTasks = true } Dag.init({ dag: this, instance: jsPlumb.getInstance({ Endpoint: [ 'Dot', { radius: 1, cssClass: 'dot-style' } ], Connector: 'Bezier', PaintStyle: { lineWidth: 2, stroke: '#456' }, // Connection style ConnectionOverlays: [ [ 'Arrow', { location: 1, id: 'arrow', length: 12, foldback: 0.8 } ], ['Label', { location: 0.5, id: 'label' }] ], Container: 'canvas', ConnectionsDetachable: true }) }) }, mounted () { this.init(this.arg) }, beforeDestroy () { this.resetParams() // Destroy round robin clearInterval(this.setIntervalP) }, destroyed () { if (eventModel) { eventModel.remove() } }, computed: { ...mapState('dag', ['tasks', 'locations', 'connects', 'isEditDag', 'name', 'projectId']) }, components: { mVersions, mFormModel, mFormLineModel, mUdp, mStart } } </script> <style lang="scss" rel="stylesheet/scss"> @import "./dag"; .operBtn { padding: 8px 6px; } </style>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,830
[Improvement][Web] Improve the selection style in dag edit dialog
*For better global communication, please give priority to using English description, thx! * **Describe the question** When we want to select all or part of the text in the input box, because the selected background color is too similar to the original background color, users often think that the text cannot be selected ![db6deb90d479ae54c0a0f89a0c8a1c838bc149d4](https://user-images.githubusercontent.com/52202080/125908608-abec7fec-2c45-47f4-a8ba-a23a4a60abbf.gif) **Which version of DolphinScheduler:** latest dev branch **Describe alternatives you've considered** A clear and concise description of any alternative improvement solutions you've considered.
https://github.com/apache/dolphinscheduler/issues/5830
https://github.com/apache/dolphinscheduler/pull/5829
bca92157a088c78d245ff60dfee2504ea8716c6a
8db1b042b2523bd3b6ed363303283308ea14b2cc
"2021-07-16T07:23:41Z"
java
"2021-07-27T15:54:07Z"
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/udp/udp.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <div class="udp-model"> <div class="scrollbar contpi-boxt"> <div> <el-input type="text" size="small" v-model="name" :disabled="router.history.current.name === 'projects-instance-details'" :placeholder="$t('Please enter name (required)')"> </el-input> </div> <template v-if="router.history.current.name !== 'projects-instance-details'"> <div style="padding-top: 12px;"> <el-input type="textarea" size="small" v-model="description" :autosize="{minRows:2}" :placeholder="$t('Please enter description(optional)')" autocomplete="off"> </el-input> </div> </template> <div class="title" style="padding-top: 6px;"> <span class="text-b">{{$t('select tenant')}}</span> <form-tenant v-model="tenantId"></form-tenant> </div> <div class="title" style="padding-top: 6px;"> <span class="text-b">{{$t('warning of timeout')}}</span> <span style="padding-left: 6px;"> <el-switch v-model="checkedTimeout" size="small"></el-switch> </span> </div> <div class="content" style="padding-bottom: 10px;" v-if="checkedTimeout"> <span> <el-input v-model="timeout" style="width: 160px;" maxlength="9" size="small"> <span slot="append">{{$t('Minute')}}</span> </el-input> </span> </div> <div class="title" style="padding-top: 6px;"> 
<span>{{$t('Set global')}}</span> </div> <div class="content"> <div> <m-local-params ref="refLocalParams" @on-local-params="_onLocalParams" :udp-list="udpList" :hide="false"> </m-local-params> </div> </div> </div> <div class="bottom"> <div class="submit"> <template v-if="router.history.current.name === 'projects-definition-details'"> <div class="lint-pt"> <el-checkbox v-model="releaseState" size="small" :false-label="'OFFLINE'" :true-label="'ONLINE'">{{$t('Whether to go online the process definition')}}</el-checkbox> </div> </template> <template v-if="router.history.current.name === 'projects-instance-details'"> <div class="lint-pt"> <el-checkbox v-model="syncDefine" size="small">{{$t('Whether to update the process definition')}}</el-checkbox> </div> </template> <el-button type="text" size="small" @click="close()"> {{$t('Cancel')}} </el-button> <el-button type="primary" size="small" round :disabled="isDetails" @click="ok()">{{$t('Add')}}</el-button> </div> </div> </div> </template> <script> import _ from 'lodash' import i18n from '@/module/i18n' import mLocalParams from '../formModel/tasks/_source/localParams' import disabledState from '@/module/mixin/disabledState' import Affirm from '../jumpAffirm' import FormTenant from './_source/selectTenant' export default { name: 'udp', data () { return { originalName: '', // dag name name: '', // dag description description: '', // Global custom parameters udpList: [], // Global custom parameters udpListCache: [], // Whether to go online the process definition releaseState: 'ONLINE', // Whether to update the process definition syncDefine: true, // Timeout alarm timeout: 0, tenantId: -1, // checked Timeout alarm checkedTimeout: true } }, mixins: [disabledState], props: { }, methods: { /** * udp data */ _onLocalParams (a) { this.udpList = a }, _verifTimeout () { const reg = /^[1-9]\d*$/ if (!reg.test(this.timeout)) { this.$message.warning(`${i18n.$t('Please enter a positive integer greater than 0')}`) return false } return 
true }, _accuStore () { const udp = _.cloneDeep(this.udpList) udp.forEach(u => { delete u.ifFixed }) this.store.commit('dag/setGlobalParams', udp) this.store.commit('dag/setName', _.cloneDeep(this.name)) this.store.commit('dag/setTimeout', _.cloneDeep(this.timeout)) this.store.commit('dag/setTenantId', _.cloneDeep(this.tenantId)) this.store.commit('dag/setDesc', _.cloneDeep(this.description)) this.store.commit('dag/setSyncDefine', this.syncDefine) this.store.commit('dag/setReleaseState', this.releaseState) }, /** * submit */ ok () { if (!this.name) { this.$message.warning(`${i18n.$t('DAG graph name cannot be empty')}`) return } let _verif = () => { // verification udf if (!this.$refs.refLocalParams._verifProp()) { return } // verification timeout if (this.checkedTimeout && !this._verifTimeout()) { return } // Storage global globalParams this._accuStore() Affirm.setIsPop(false) this.$emit('onUdp') } if (this.originalName !== this.name) { this.store.dispatch('dag/verifDAGName', this.name).then(res => { _verif() }).catch(e => { this.$message.error(e.msg || '') }) } else { _verif() } }, /** * Close the popup */ close () { this.$emit('close') }, /** * reload localParam */ reloadParam () { const dag = _.cloneDeep(this.store.state.dag) let globalParams = _.cloneDeep(dag.globalParams) let udpList = [...globalParams] this.udpList = udpList this.udpListCache = udpList } }, watch: { checkedTimeout (val) { if (!val) { this.timeout = 0 this.store.commit('dag/setTimeout', _.cloneDeep(this.timeout)) } } }, created () { const dag = _.cloneDeep(this.store.state.dag) this.name = dag.name this.originalName = dag.name this.description = dag.description this.syncDefine = dag.syncDefine this.releaseState = dag.releaseState this.timeout = dag.timeout || 0 this.checkedTimeout = this.timeout !== 0 this.$nextTick(() => { if (dag.tenantId > -1) { this.tenantId = dag.tenantId } else if (this.store.state.user.userInfo.tenantId) { this.tenantId = this.store.state.user.userInfo.tenantId } }) }, 
mounted () {}, components: { FormTenant, mLocalParams } } </script> <style lang="scss" rel="stylesheet/scss"> .udp-model { width: 624px; min-height: 420px; background: #fff; border-radius: 3px; padding:20px 0 ; position: relative; .contpi-boxt { max-height: 600px; overflow-y: scroll; padding:0 20px; } .title { line-height: 36px; padding-bottom: 10px; span { font-size: 16px; color: #333; } } .bottom{ position: absolute; bottom: 0; left: 0; width: 100%; text-align: right; height: 56px; line-height: 56px; border-top: 1px solid #DCDEDC; background: #fff; .submit { padding-right: 20px; margin-top: -4px; } .lint-pt { position: absolute; left: 20px; top: -2px; >label { font-weight: normal; } } } .content { padding-bottom: 50px; .user-def-params-model { .add { a { color: #0097e0; } } } } } </style>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,352
[Bug][ui] input field cat't select multiple characters ,cat't copy
[Bug][ui] input field cat't select multiple characters ,cat't copy There are two kind of the input field of node definition , distinguish by background. One of them has a white background.When we input something in this One of them has a white background ,we can't select characters use mouse
https://github.com/apache/dolphinscheduler/issues/5352
https://github.com/apache/dolphinscheduler/pull/5829
bca92157a088c78d245ff60dfee2504ea8716c6a
8db1b042b2523bd3b6ed363303283308ea14b2cc
"2021-04-22T06:35:31Z"
java
"2021-07-27T15:54:07Z"
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <div class="clearfix dag-model" > <div class="toolbar"> <div class="title"><span>{{$t('Toolbar')}}</span></div> <div class="toolbar-btn"> <div class="bar-box roundedRect jtk-draggable jtk-droppable jtk-endpoint-anchor jtk-connected" :class="v === dagBarId ? 
'active' : ''" :id="v" :key="v" v-for="(item,v) in tasksTypeList" @mousedown="_getDagId(v)"> <div data-toggle="tooltip" :title="item.desc"> <div class="icos" :class="'icos-' + v" ></div> </div> </div> </div> </div> <div class="dag-contect"> <div class="dag-toolbar"> <div class="assist-btn"> <el-tooltip :content="$t('View variables')" placement="top" :enterable="false"> <span> <el-button style="vertical-align: middle;" type="primary" size="mini" :disabled="$route.name !== 'projects-instance-details'" @click="_toggleView" icon="el-icon-c-scale-to-original"> </el-button> </span> </el-tooltip> <el-tooltip :content="$t('Startup parameter')" placement="top" :enterable="false"> <span> <el-button style="vertical-align: middle;" type="primary" size="mini" :disabled="$route.name !== 'projects-instance-details'" @click="_toggleParam" icon="el-icon-arrow-right"> </el-button> </span> </el-tooltip> <span class="name">{{name}}</span> &nbsp; <span v-if="name" class="copy-name" @click="_copyName" :data-clipboard-text="name"><em class="el-icon-copy-document" data-container="body" data-toggle="tooltip" :title="$t('Copy name')" ></em></span> </div> <div class="save-btn"> <div class="operation" style="vertical-align: middle;"> <a href="javascript:" v-for="(item,$index) in toolOperList" :class="_operationClass(item)" :id="item.code" :key="$index" @click="_ckOperation(item,$event)"> <el-tooltip :content="item.desc" placement="top" :enterable="false"> <span><el-button type="text" class="operBtn" :icon="item.icon"></el-button></span> </el-tooltip> </a> </div> <el-tooltip :content="$t('Format DAG')" placement="top" :enterable="false"> <span> <el-button type="primary" icon="el-icon-caret-right" size="mini" v-if="(type === 'instance' || 'definition') && urlParam.id !=undefined" style="vertical-align: middle;" @click="dagAutomaticLayout"> </el-button> </span> </el-tooltip> <el-tooltip :content="$t('Refresh DAG status')" placement="top" :enterable="false"> <span> <el-button 
style="vertical-align: middle;" icon="el-icon-refresh" type="primary" :loading="isRefresh" v-if="type === 'instance'" @click="!isRefresh && _refresh()" size="mini" > </el-button> </span> </el-tooltip> <el-button v-if="isRtTasks" style="vertical-align: middle;" type="primary" size="mini" icon="el-icon-back" @click="_rtNodesDag" > {{$t('Return_1')}} </el-button> <span> <el-button type="primary" icon="el-icon-switch-button" size="mini" v-if="(type === 'instance' || 'definition') " style="vertical-align: middle;" @click="_closeDAG"> {{$t('Close')}} </el-button> </span> <el-button style="vertical-align: middle;" type="primary" size="mini" :loading="spinnerLoading" @click="_saveChart" icon="el-icon-document-checked" > {{spinnerLoading ? 'Loading...' : $t('Save')}} </el-button> <span> <el-button style="vertical-align: middle;" type="primary" size="mini" :loading="spinnerLoading" @click="_version" :disabled="$route.params.id == null" icon="el-icon-info"> {{spinnerLoading ? 'Loading...' : $t('Version Info')}} </el-button> </span> </div> </div> <div class="scrollbar dag-container"> <div class="jtk-demo" id="jtk-demo"> <div class="jtk-demo-canvas canvas-wide statemachine-demo jtk-surface jtk-surface-nopan jtk-draggable" id="canvas" ></div> </div> </div> <el-drawer :visible.sync="drawer" size="" :with-header="false"> <m-versions :versionData = versionData :isInstance="type === 'instance'" @mVersionSwitchProcessDefinitionVersion="mVersionSwitchProcessDefinitionVersion" @mVersionGetProcessDefinitionVersionsPage="mVersionGetProcessDefinitionVersionsPage" @mVersionDeleteProcessDefinitionVersion="mVersionDeleteProcessDefinitionVersion" @closeVersion="closeVersion"></m-versions> </el-drawer> <el-drawer :visible.sync="nodeDrawer" size="" :with-header="false"> <m-form-model v-if="nodeDrawer" :nodeData=nodeData @seeHistory="seeHistory" @addTaskInfo="addTaskInfo" @cacheTaskInfo="cacheTaskInfo" @close="close" @onSubProcess="onSubProcess"></m-form-model> </el-drawer> <el-drawer 
:visible.sync="lineDrawer" size="" :wrapperClosable="false" :with-header="false"> <m-form-line-model :lineData = lineData @addLineInfo="addLineInfo" @cancel="cancel"></m-form-line-model> </el-drawer> <el-drawer :visible.sync="udpDrawer" size="" :wrapperClosable="false" :with-header="false"> <m-udp></m-udp> </el-drawer> <el-dialog :title="$t('Set the DAG diagram name')" :visible.sync="dialogVisible" width="auto"> <m-udp ref="mUdp" @onUdp="onUdpDialog" @close="closeDialog"></m-udp> </el-dialog> <el-dialog :title="$t('Please set the parameters before starting')" :visible.sync="startDialog" width="auto"> <m-start :startData= "startData" :startNodeList="startNodeList" :sourceType="sourceType" @onUpdateStart="onUpdateStart" @closeStart="closeStart"></m-start> </el-dialog> </div> </div> </template> <script> import _ from 'lodash' import Dag from './dag' import mUdp from './udp/udp' import i18n from '@/module/i18n' import { jsPlumb } from 'jsplumb' import Clipboard from 'clipboard' import { allNodesId } from './plugIn/util' import { toolOper, tasksType } from './config' import mFormModel from './formModel/formModel' import mFormLineModel from './formModel/formLineModel' import { formatDate } from '@/module/filter/filter' import { findComponentDownward } from '@/module/util/' import disabledState from '@/module/mixin/disabledState' import { mapActions, mapState, mapMutations } from 'vuex' import mStart from '../../projects/pages/definition/pages/list/_source/start' import mVersions from '../../projects/pages/definition/pages/list/_source/versions' let eventModel export default { name: 'dag-chart', data () { return { tasksTypeList: tasksType, toolOperList: toolOper(this), dagBarId: null, toolOperCode: '', spinnerLoading: false, urlParam: { id: this.$route.params.id || null }, isRtTasks: false, isRefresh: false, isLoading: false, taskId: null, arg: false, versionData: { processDefinition: { id: null, version: '', releaseState: '' }, processDefinitionVersions: [], total: null, 
pageNo: null, pageSize: null }, drawer: false, nodeData: { id: null, taskType: '', self: {}, preNode: [], rearList: [], instanceId: null }, nodeDrawer: false, lineData: { id: null, sourceId: '', targetId: '' }, lineDrawer: false, udpDrawer: false, dialogVisible: false, startDialog: false, startData: {}, startNodeList: '', sourceType: '' } }, mixins: [disabledState], props: { type: String, releaseState: String }, methods: { ...mapActions('dag', ['saveDAGchart', 'updateInstance', 'updateDefinition', 'getTaskState', 'switchProcessDefinitionVersion', 'getProcessDefinitionVersionsPage', 'deleteProcessDefinitionVersion']), ...mapMutations('dag', ['addTasks', 'cacheTasks', 'resetParams', 'setIsEditDag', 'setName', 'addConnects', 'resetLocalParam']), startRunning (item, startNodeList, sourceType) { this.startData = item this.startNodeList = startNodeList this.sourceType = sourceType this.startDialog = true }, onUpdateStart () { this.startDialog = false }, closeStart () { this.startDialog = false }, // DAG automatic layout dagAutomaticLayout () { if (this.store.state.dag.isEditDag) { this.$message.warning(`${i18n.$t('Please save the DAG before formatting')}`) return false } $('#canvas').html('') // Destroy round robin Dag.init({ dag: this, instance: jsPlumb.getInstance({ Endpoint: [ 'Dot', { radius: 1, cssClass: 'dot-style' } ], Connector: 'Bezier', PaintStyle: { lineWidth: 2, stroke: '#456' }, // Connection style ConnectionOverlays: [ [ 'Arrow', { location: 1, id: 'arrow', length: 12, foldback: 0.8 } ], ['Label', { location: 0.5, id: 'label' }] ], Container: 'canvas', ConnectionsDetachable: true }) }) if (this.tasks.length) { Dag.backfill(true) if (this.type === 'instance') { this._getTaskState(false).then(res => {}) } } else { Dag.create() } }, init (args) { if (this.tasks.length) { Dag.backfill(args) // Process instances can view status if (this.type === 'instance') { this._getTaskState(false).then(res => {}) // Round robin acquisition status this.setIntervalP = 
setInterval(() => { this._getTaskState(true).then(res => {}) }, 90000) } } else { Dag.create() } }, /** * copy name */ _copyName () { let clipboard = new Clipboard('.copy-name') clipboard.on('success', e => { this.$message.success(`${i18n.$t('Copy success')}`) // Free memory clipboard.destroy() }) clipboard.on('error', e => { // Copy is not supported this.$message.warning(`${i18n.$t('The browser does not support automatic copying')}`) // Free memory clipboard.destroy() }) }, /** * Get state interface * @param isReset Whether to manually refresh */ _getTaskState (isReset) { return new Promise((resolve, reject) => { this.getTaskState(this.urlParam.id).then(res => { let data = res.list let state = res.processInstanceState let taskList = res.taskList let idArr = allNodesId() const titleTpl = (item, desc) => { let $item = _.filter(taskList, v => v.name === item.name)[0] return `<div style="text-align: left">${i18n.$t('Name')}:${$item.name}</br>${i18n.$t('State')}:${desc}</br>${i18n.$t('type')}:${$item.taskType}</br>${i18n.$t('host')}:${$item.host || '-'}</br>${i18n.$t('Retry Count')}:${$item.retryTimes}</br>${i18n.$t('Submit Time')}:${formatDate($item.submitTime)}</br>${i18n.$t('Start Time')}:${formatDate($item.startTime)}</br>${i18n.$t('End Time')}:${$item.endTime ? formatDate($item.endTime) : '-'}</br></div>` } // remove tip state dom $('.w').find('.state-p').html('') const newTask = [] data.forEach(v1 => { idArr.forEach(v2 => { if (v2.name === v1.name) { let dom = $(`#${v2.id}`) let state = dom.find('.state-p') let depState = '' taskList.forEach(item => { if (item.name === v1.name) { depState = item.state const params = item.taskParams ? JSON.parse(item.taskParams) : '' let localParam = params.localParams || [] newTask.push({ id: v2.id, localParam }) } }) dom.attr('data-state-id', v1.stateId) dom.attr('data-dependent-result', v1.dependentResult || '') dom.attr('data-dependent-depState', depState) state.append(`<strong class="${v1.icoUnicode} ${v1.isSpin ? 
'as as-spin' : ''}" style="color:${v1.color}" data-toggle="tooltip" data-html="true" data-container="body"></strong>`) state.find('strong').attr('title', titleTpl(v2, v1.desc)) } }) }) if (state === 'PAUSE' || state === 'STOP' || state === 'FAILURE' || this.state === 'SUCCESS') { // Manual refresh does not regain large json if (isReset) { findComponentDownward(this.$root, `${this.type}-details`)._reset() } } if (!isReset) { this.resetLocalParam(newTask) } resolve() }) }) }, /** * Get the action bar id * @param item */ _getDagId (v) { // if (this.isDetails) { // return // } this.dagBarId = v }, /** * operating */ _ckOperation (item) { let is = true let code = '' if (item.disable) { return } if (this.toolOperCode === item.code) { this.toolOperCode = '' code = item.code is = false } else { this.toolOperCode = item.code code = this.toolOperCode is = true } // event type Dag.toolbarEvent({ item: item, code: code, is: is }) }, _operationClass (item) { return this.toolOperCode === item.code ? 'active' : '' // if (item.disable) { // return this.toolOperCode === item.code ? 'active' : '' // } else { // return 'disable' // } }, /** * Storage interface */ _save (sourceType) { return new Promise((resolve, reject) => { this.spinnerLoading = true // Storage store Dag.saveStore().then(res => { if (this._verifConditions(res.tasks)) { if (this.urlParam.id) { /** * Edit * @param saveInstanceEditDAGChart => Process instance editing * @param saveEditDAGChart => Process definition editing */ this[this.type === 'instance' ? 
'updateInstance' : 'updateDefinition'](this.urlParam.id).then(res => { // this.$message.success(res.msg) this.$message({ message: res.msg, type: 'success', offset: 80 }) this.spinnerLoading = false // Jump process definition if (this.type === 'instance') { this.$router.push({ path: `/projects/${this.projectId}/instance/list/${this.urlParam.id}` }) } else { this.$router.push({ path: `/projects/${this.projectId}/definition/list/${this.urlParam.id}` }) } resolve() }).catch(e => { this.$message.error(e.msg || '') this.spinnerLoading = false reject(e) }) } else { // New this.saveDAGchart().then(res => { this.$message.success(res.msg) this.spinnerLoading = false // source @/conf/home/pages/dag/_source/editAffirmModel/index.js if (sourceType !== 'affirm') { // Jump process definition this.$router.push({ name: 'projects-definition-list' }) } resolve() }).catch(e => { this.$message.error(e.msg || '') this.setName('') this.spinnerLoading = false reject(e) }) } } }) }) }, _closeDAG () { let $name = this.$route.name if ($name && $name.indexOf('definition') !== -1) { this.$router.push({ name: 'projects-definition-list' }) } else { this.$router.push({ name: 'projects-instance-list' }) } }, _verifConditions (value) { let tasks = value let bool = true tasks.map(v => { if (v.type === 'CONDITIONS' && (v.conditionResult.successNode[0] === '' || v.conditionResult.successNode[0] === null || v.conditionResult.failedNode[0] === '' || v.conditionResult.failedNode[0] === null)) { bool = false return false } }) if (!bool) { this.$message.warning(`${i18n.$t('Successful branch flow and failed branch flow are required')}`) this.spinnerLoading = false return false } return true }, onUdpDialog () { this._save() this.dialogVisible = false }, closeDialog () { this.dialogVisible = false }, /** * Save chart */ _saveChart () { // Verify node if (!this.tasks.length) { this.$message.warning(`${i18n.$t('Failed to create node to save')}`) return } this.dialogVisible = true this.$nextTick(() => { 
this.$refs.mUdp.reloadParam() }) }, /** * Return to the previous child node */ _rtNodesDag () { let getIds = this.$route.query.subProcessIds let idsArr = getIds.split(',') let ids = idsArr.slice(0, idsArr.length - 1) let id = idsArr[idsArr.length - 1] let query = {} if (id !== idsArr[0]) { query = { subProcessIds: ids.join(',') } } let $name = this.$route.name.split('-') this.$router.push({ path: `/${$name[0]}/${$name[1]}/list/${id}`, query: query }) }, /** * Subprocess processing * @param subProcessId Subprocess ID */ _subProcessHandle (subProcessId) { let subProcessIds = [] let getIds = this.$route.query.subProcessIds if (getIds) { let newId = getIds.split(',') newId.push(this.urlParam.id) subProcessIds = newId } else { subProcessIds.push(this.urlParam.id) } let $name = this.$route.name.split('-') this.$router.push({ path: `/${$name[0]}/${$name[1]}/list/${subProcessId}`, query: { subProcessIds: subProcessIds.join(',') } }) }, /** * Refresh data */ _refresh () { this.isRefresh = true this._getTaskState(false).then(res => { setTimeout(() => { this.isRefresh = false this.$message.success(`${i18n.$t('Refresh status succeeded')}`) }, 2200) }) }, /** * View variables */ _toggleView () { findComponentDownward(this.$root, 'assist-dag-index')._toggleView() }, /** * Starting parameters */ _toggleParam () { findComponentDownward(this.$root, 'starting-params-dag-index')._toggleParam() }, addLineInfo ({ item, fromThis }) { this.addConnects(item) this.lineDrawer = false }, cancel ({ fromThis }) { this.lineDrawer = false }, /** * Create a node popup layer * @param Object id */ _createLineLabel ({ id, sourceId, targetId }) { this.lineData.id = id this.lineData.sourceId = sourceId this.lineData.targetId = targetId this.lineDrawer = true }, seeHistory (taskName) { this.nodeData.self.$router.push({ name: 'task-instance', query: { processInstanceId: this.nodeData.self.$route.params.id, taskName: taskName } }) }, addTaskInfo ({ item, fromThis }) { this.addTasks(item) this.nodeDrawer 
= false }, cacheTaskInfo ({ item, fromThis }) { this.cacheTasks(item) }, close ({ item, flag, fromThis }) { this.addTasks(item) // Edit status does not allow deletion of nodes if (flag) { jsPlumb.remove(this.nodeData.id) } this.nodeDrawer = false }, onSubProcess ({ subProcessId, fromThis }) { this._subProcessHandle(subProcessId) }, _createNodes ({ id, type }) { let self = this let preNode = [] let rearNode = [] let rearList = [] $('div[data-targetarr*="' + id + '"]').each(function () { rearNode.push($(this).attr('id')) }) if (rearNode.length > 0) { rearNode.forEach(v => { let rearobj = {} rearobj.value = $(`#${v}`).find('.name-p').text() rearobj.label = $(`#${v}`).find('.name-p').text() rearList.push(rearobj) }) } else { rearList = [] } let targetarr = $(`#${id}`).attr('data-targetarr') if (targetarr) { let nodearr = targetarr.split(',') nodearr.forEach(v => { let nodeobj = {} nodeobj.value = $(`#${v}`).find('.name-p').text() nodeobj.label = $(`#${v}`).find('.name-p').text() preNode.push(nodeobj) }) } else { preNode = [] } this.taskId = id type = type || self.dagBarId this.nodeData.id = id this.nodeData.taskType = type this.nodeData.self = self this.nodeData.preNode = preNode this.nodeData.rearList = rearList this.nodeData.instanceId = this.$route.params.id this.nodeDrawer = true }, removeEventModelById ($id) { if (eventModel && this.taskId === $id) { eventModel.remove() } }, /** * switch version in process definition version list * * @param version the version user want to change * @param processDefinitionId the process definition id * @param fromThis fromThis */ mVersionSwitchProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) { this.$store.state.dag.isSwitchVersion = true this.switchProcessDefinitionVersion({ version: version, processDefinitionId: processDefinitionId }).then(res => { this.$message.success($t('Switch Version Successfully')) this.$router.push({ path: `/projects/${this.projectId}/definition/list/${processDefinitionId}` }) 
}).catch(e => { this.$store.state.dag.isSwitchVersion = false this.$message.error(e.msg || '') }) }, /** * Paging event of process definition versions * * @param pageNo page number * @param pageSize page size * @param processDefinitionId the process definition id of page version * @param fromThis fromThis */ mVersionGetProcessDefinitionVersionsPage ({ pageNo, pageSize, processDefinitionCode, fromThis }) { this.getProcessDefinitionVersionsPage({ pageNo: pageNo, pageSize: pageSize, processDefinitionCode: processDefinitionCode }).then(res => { this.versionData.processDefinitionVersions = res.data.lists this.versionData.total = res.data.totalCount this.versionData.pageSize = res.data.pageSize this.versionData.pageNo = res.data.currentPage }).catch(e => { this.$message.error(e.msg || '') }) }, /** * delete one version of process definition * * @param version the version need to delete * @param processDefinitionId the process definition id user want to delete * @param fromThis fromThis */ mVersionDeleteProcessDefinitionVersion ({ version, processDefinitionId, processDefinitionCode, fromThis }) { this.deleteProcessDefinitionVersion({ version: version, processDefinitionId: processDefinitionId }).then(res => { this.$message.success(res.msg || '') this.mVersionGetProcessDefinitionVersionsPage({ pageNo: 1, pageSize: 10, processDefinitionCode: processDefinitionCode, fromThis: fromThis }) }).catch(e => { this.$message.error(e.msg || '') }) }, /** * query the process definition pagination version */ _version (item) { this.getProcessDefinitionVersionsPage({ pageNo: 1, pageSize: 10, processDefinitionCode: this.store.state.dag.code }).then(res => { let processDefinitionVersions = res.data.lists let total = res.data.totalCount let pageSize = res.data.pageSize let pageNo = res.data.currentPage this.versionData.processDefinition.id = this.urlParam.id this.versionData.processDefinition.code = this.store.state.dag.code this.versionData.processDefinition.version = 
this.$store.state.dag.version this.versionData.processDefinition.releaseState = this.releaseState this.versionData.processDefinitionVersions = processDefinitionVersions this.versionData.total = total this.versionData.pageNo = pageNo this.versionData.pageSize = pageSize this.drawer = true }).catch(e => { this.$message.error(e.msg || '') }) }, closeVersion () { this.drawer = false } }, watch: { tasks: { deep: true, handler (o) { // Edit state does not allow deletion of node a... this.setIsEditDag(true) } } }, created () { // Edit state does not allow deletion of node a... this.setIsEditDag(false) if (this.$route.query.subProcessIds) { this.isRtTasks = true } Dag.init({ dag: this, instance: jsPlumb.getInstance({ Endpoint: [ 'Dot', { radius: 1, cssClass: 'dot-style' } ], Connector: 'Bezier', PaintStyle: { lineWidth: 2, stroke: '#456' }, // Connection style ConnectionOverlays: [ [ 'Arrow', { location: 1, id: 'arrow', length: 12, foldback: 0.8 } ], ['Label', { location: 0.5, id: 'label' }] ], Container: 'canvas', ConnectionsDetachable: true }) }) }, mounted () { this.init(this.arg) }, beforeDestroy () { this.resetParams() // Destroy round robin clearInterval(this.setIntervalP) }, destroyed () { if (eventModel) { eventModel.remove() } }, computed: { ...mapState('dag', ['tasks', 'locations', 'connects', 'isEditDag', 'name', 'projectId']) }, components: { mVersions, mFormModel, mFormLineModel, mUdp, mStart } } </script> <style lang="scss" rel="stylesheet/scss"> @import "./dag"; .operBtn { padding: 8px 6px; } </style>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,352
[Bug][ui] input field cat't select multiple characters ,cat't copy
[Bug][ui] input field cat't select multiple characters ,cat't copy There are two kind of the input field of node definition , distinguish by background. One of them has a white background.When we input something in this One of them has a white background ,we can't select characters use mouse
https://github.com/apache/dolphinscheduler/issues/5352
https://github.com/apache/dolphinscheduler/pull/5829
bca92157a088c78d245ff60dfee2504ea8716c6a
8db1b042b2523bd3b6ed363303283308ea14b2cc
"2021-04-22T06:35:31Z"
java
"2021-07-27T15:54:07Z"
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/udp/udp.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <div class="udp-model"> <div class="scrollbar contpi-boxt"> <div> <el-input type="text" size="small" v-model="name" :disabled="router.history.current.name === 'projects-instance-details'" :placeholder="$t('Please enter name (required)')"> </el-input> </div> <template v-if="router.history.current.name !== 'projects-instance-details'"> <div style="padding-top: 12px;"> <el-input type="textarea" size="small" v-model="description" :autosize="{minRows:2}" :placeholder="$t('Please enter description(optional)')" autocomplete="off"> </el-input> </div> </template> <div class="title" style="padding-top: 6px;"> <span class="text-b">{{$t('select tenant')}}</span> <form-tenant v-model="tenantId"></form-tenant> </div> <div class="title" style="padding-top: 6px;"> <span class="text-b">{{$t('warning of timeout')}}</span> <span style="padding-left: 6px;"> <el-switch v-model="checkedTimeout" size="small"></el-switch> </span> </div> <div class="content" style="padding-bottom: 10px;" v-if="checkedTimeout"> <span> <el-input v-model="timeout" style="width: 160px;" maxlength="9" size="small"> <span slot="append">{{$t('Minute')}}</span> </el-input> </span> </div> <div class="title" style="padding-top: 6px;"> 
<span>{{$t('Set global')}}</span> </div> <div class="content"> <div> <m-local-params ref="refLocalParams" @on-local-params="_onLocalParams" :udp-list="udpList" :hide="false"> </m-local-params> </div> </div> </div> <div class="bottom"> <div class="submit"> <template v-if="router.history.current.name === 'projects-definition-details'"> <div class="lint-pt"> <el-checkbox v-model="releaseState" size="small" :false-label="'OFFLINE'" :true-label="'ONLINE'">{{$t('Whether to go online the process definition')}}</el-checkbox> </div> </template> <template v-if="router.history.current.name === 'projects-instance-details'"> <div class="lint-pt"> <el-checkbox v-model="syncDefine" size="small">{{$t('Whether to update the process definition')}}</el-checkbox> </div> </template> <el-button type="text" size="small" @click="close()"> {{$t('Cancel')}} </el-button> <el-button type="primary" size="small" round :disabled="isDetails" @click="ok()">{{$t('Add')}}</el-button> </div> </div> </div> </template> <script> import _ from 'lodash' import i18n from '@/module/i18n' import mLocalParams from '../formModel/tasks/_source/localParams' import disabledState from '@/module/mixin/disabledState' import Affirm from '../jumpAffirm' import FormTenant from './_source/selectTenant' export default { name: 'udp', data () { return { originalName: '', // dag name name: '', // dag description description: '', // Global custom parameters udpList: [], // Global custom parameters udpListCache: [], // Whether to go online the process definition releaseState: 'ONLINE', // Whether to update the process definition syncDefine: true, // Timeout alarm timeout: 0, tenantId: -1, // checked Timeout alarm checkedTimeout: true } }, mixins: [disabledState], props: { }, methods: { /** * udp data */ _onLocalParams (a) { this.udpList = a }, _verifTimeout () { const reg = /^[1-9]\d*$/ if (!reg.test(this.timeout)) { this.$message.warning(`${i18n.$t('Please enter a positive integer greater than 0')}`) return false } return 
true }, _accuStore () { const udp = _.cloneDeep(this.udpList) udp.forEach(u => { delete u.ifFixed }) this.store.commit('dag/setGlobalParams', udp) this.store.commit('dag/setName', _.cloneDeep(this.name)) this.store.commit('dag/setTimeout', _.cloneDeep(this.timeout)) this.store.commit('dag/setTenantId', _.cloneDeep(this.tenantId)) this.store.commit('dag/setDesc', _.cloneDeep(this.description)) this.store.commit('dag/setSyncDefine', this.syncDefine) this.store.commit('dag/setReleaseState', this.releaseState) }, /** * submit */ ok () { if (!this.name) { this.$message.warning(`${i18n.$t('DAG graph name cannot be empty')}`) return } let _verif = () => { // verification udf if (!this.$refs.refLocalParams._verifProp()) { return } // verification timeout if (this.checkedTimeout && !this._verifTimeout()) { return } // Storage global globalParams this._accuStore() Affirm.setIsPop(false) this.$emit('onUdp') } if (this.originalName !== this.name) { this.store.dispatch('dag/verifDAGName', this.name).then(res => { _verif() }).catch(e => { this.$message.error(e.msg || '') }) } else { _verif() } }, /** * Close the popup */ close () { this.$emit('close') }, /** * reload localParam */ reloadParam () { const dag = _.cloneDeep(this.store.state.dag) let globalParams = _.cloneDeep(dag.globalParams) let udpList = [...globalParams] this.udpList = udpList this.udpListCache = udpList } }, watch: { checkedTimeout (val) { if (!val) { this.timeout = 0 this.store.commit('dag/setTimeout', _.cloneDeep(this.timeout)) } } }, created () { const dag = _.cloneDeep(this.store.state.dag) this.name = dag.name this.originalName = dag.name this.description = dag.description this.syncDefine = dag.syncDefine this.releaseState = dag.releaseState this.timeout = dag.timeout || 0 this.checkedTimeout = this.timeout !== 0 this.$nextTick(() => { if (dag.tenantId > -1) { this.tenantId = dag.tenantId } else if (this.store.state.user.userInfo.tenantId) { this.tenantId = this.store.state.user.userInfo.tenantId } }) }, 
mounted () {}, components: { FormTenant, mLocalParams } } </script> <style lang="scss" rel="stylesheet/scss"> .udp-model { width: 624px; min-height: 420px; background: #fff; border-radius: 3px; padding:20px 0 ; position: relative; .contpi-boxt { max-height: 600px; overflow-y: scroll; padding:0 20px; } .title { line-height: 36px; padding-bottom: 10px; span { font-size: 16px; color: #333; } } .bottom{ position: absolute; bottom: 0; left: 0; width: 100%; text-align: right; height: 56px; line-height: 56px; border-top: 1px solid #DCDEDC; background: #fff; .submit { padding-right: 20px; margin-top: -4px; } .lint-pt { position: absolute; left: 20px; top: -2px; >label { font-weight: normal; } } } .content { padding-bottom: 50px; .user-def-params-model { .add { a { color: #0097e0; } } } } } </style>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,904
[Bug][upgrade]dev branch upgrade mysql sql script error
**Describe the bug** ``` IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_datasource' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='t_ds_datasource_name_UN') ``` This statement for creating the index is incorrect: it looks up `t_ds_datasource_name_UN` in `information_schema.COLUMNS`, but that is an index (unique key) name, not a column name, so the existence check never matches. **Which version of Dolphin Scheduler:** -[dev]
https://github.com/apache/dolphinscheduler/issues/5904
https://github.com/apache/dolphinscheduler/pull/5821
8db1b042b2523bd3b6ed363303283308ea14b2cc
0075523fdd7f170a626634ebe41c0cef4c36feaf
"2021-07-28T04:34:10Z"
java
"2021-07-28T09:22:39Z"
sql/dolphinscheduler_mysql.sql
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ SET FOREIGN_KEY_CHECKS=0; -- ---------------------------- -- Table structure for QRTZ_BLOB_TRIGGERS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_BLOB_TRIGGERS`; CREATE TABLE `QRTZ_BLOB_TRIGGERS` ( `SCHED_NAME` varchar(120) NOT NULL, `TRIGGER_NAME` varchar(200) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, `BLOB_DATA` blob, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), KEY `SCHED_NAME` (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), CONSTRAINT `QRTZ_BLOB_TRIGGERS_ibfk_1` FOREIGN KEY (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) REFERENCES `QRTZ_TRIGGERS` (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_BLOB_TRIGGERS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_CALENDARS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_CALENDARS`; CREATE TABLE `QRTZ_CALENDARS` ( `SCHED_NAME` varchar(120) NOT NULL, `CALENDAR_NAME` varchar(200) NOT NULL, `CALENDAR` blob NOT NULL, PRIMARY KEY (`SCHED_NAME`,`CALENDAR_NAME`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_CALENDARS -- 
---------------------------- -- ---------------------------- -- Table structure for QRTZ_CRON_TRIGGERS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_CRON_TRIGGERS`; CREATE TABLE `QRTZ_CRON_TRIGGERS` ( `SCHED_NAME` varchar(120) NOT NULL, `TRIGGER_NAME` varchar(200) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, `CRON_EXPRESSION` varchar(120) NOT NULL, `TIME_ZONE_ID` varchar(80) DEFAULT NULL, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), CONSTRAINT `QRTZ_CRON_TRIGGERS_ibfk_1` FOREIGN KEY (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) REFERENCES `QRTZ_TRIGGERS` (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_CRON_TRIGGERS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_FIRED_TRIGGERS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_FIRED_TRIGGERS`; CREATE TABLE `QRTZ_FIRED_TRIGGERS` ( `SCHED_NAME` varchar(120) NOT NULL, `ENTRY_ID` varchar(200) NOT NULL, `TRIGGER_NAME` varchar(200) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, `INSTANCE_NAME` varchar(200) NOT NULL, `FIRED_TIME` bigint(13) NOT NULL, `SCHED_TIME` bigint(13) NOT NULL, `PRIORITY` int(11) NOT NULL, `STATE` varchar(16) NOT NULL, `JOB_NAME` varchar(200) DEFAULT NULL, `JOB_GROUP` varchar(200) DEFAULT NULL, `IS_NONCONCURRENT` varchar(1) DEFAULT NULL, `REQUESTS_RECOVERY` varchar(1) DEFAULT NULL, PRIMARY KEY (`SCHED_NAME`,`ENTRY_ID`), KEY `IDX_QRTZ_FT_TRIG_INST_NAME` (`SCHED_NAME`,`INSTANCE_NAME`), KEY `IDX_QRTZ_FT_INST_JOB_REQ_RCVRY` (`SCHED_NAME`,`INSTANCE_NAME`,`REQUESTS_RECOVERY`), KEY `IDX_QRTZ_FT_J_G` (`SCHED_NAME`,`JOB_NAME`,`JOB_GROUP`), KEY `IDX_QRTZ_FT_JG` (`SCHED_NAME`,`JOB_GROUP`), KEY `IDX_QRTZ_FT_T_G` (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), KEY `IDX_QRTZ_FT_TG` (`SCHED_NAME`,`TRIGGER_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_FIRED_TRIGGERS -- 
---------------------------- -- ---------------------------- -- Table structure for QRTZ_JOB_DETAILS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_JOB_DETAILS`; CREATE TABLE `QRTZ_JOB_DETAILS` ( `SCHED_NAME` varchar(120) NOT NULL, `JOB_NAME` varchar(200) NOT NULL, `JOB_GROUP` varchar(200) NOT NULL, `DESCRIPTION` varchar(250) DEFAULT NULL, `JOB_CLASS_NAME` varchar(250) NOT NULL, `IS_DURABLE` varchar(1) NOT NULL, `IS_NONCONCURRENT` varchar(1) NOT NULL, `IS_UPDATE_DATA` varchar(1) NOT NULL, `REQUESTS_RECOVERY` varchar(1) NOT NULL, `JOB_DATA` blob, PRIMARY KEY (`SCHED_NAME`,`JOB_NAME`,`JOB_GROUP`), KEY `IDX_QRTZ_J_REQ_RECOVERY` (`SCHED_NAME`,`REQUESTS_RECOVERY`), KEY `IDX_QRTZ_J_GRP` (`SCHED_NAME`,`JOB_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_JOB_DETAILS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_LOCKS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_LOCKS`; CREATE TABLE `QRTZ_LOCKS` ( `SCHED_NAME` varchar(120) NOT NULL, `LOCK_NAME` varchar(40) NOT NULL, PRIMARY KEY (`SCHED_NAME`,`LOCK_NAME`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_LOCKS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_PAUSED_TRIGGER_GRPS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_PAUSED_TRIGGER_GRPS`; CREATE TABLE `QRTZ_PAUSED_TRIGGER_GRPS` ( `SCHED_NAME` varchar(120) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_PAUSED_TRIGGER_GRPS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_SCHEDULER_STATE -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_SCHEDULER_STATE`; CREATE TABLE `QRTZ_SCHEDULER_STATE` ( `SCHED_NAME` varchar(120) NOT NULL, `INSTANCE_NAME` varchar(200) NOT NULL, 
`LAST_CHECKIN_TIME` bigint(13) NOT NULL, `CHECKIN_INTERVAL` bigint(13) NOT NULL, PRIMARY KEY (`SCHED_NAME`,`INSTANCE_NAME`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_SCHEDULER_STATE -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_SIMPLE_TRIGGERS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_SIMPLE_TRIGGERS`; CREATE TABLE `QRTZ_SIMPLE_TRIGGERS` ( `SCHED_NAME` varchar(120) NOT NULL, `TRIGGER_NAME` varchar(200) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, `REPEAT_COUNT` bigint(7) NOT NULL, `REPEAT_INTERVAL` bigint(12) NOT NULL, `TIMES_TRIGGERED` bigint(10) NOT NULL, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), CONSTRAINT `QRTZ_SIMPLE_TRIGGERS_ibfk_1` FOREIGN KEY (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) REFERENCES `QRTZ_TRIGGERS` (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_SIMPLE_TRIGGERS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_SIMPROP_TRIGGERS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_SIMPROP_TRIGGERS`; CREATE TABLE `QRTZ_SIMPROP_TRIGGERS` ( `SCHED_NAME` varchar(120) NOT NULL, `TRIGGER_NAME` varchar(200) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, `STR_PROP_1` varchar(512) DEFAULT NULL, `STR_PROP_2` varchar(512) DEFAULT NULL, `STR_PROP_3` varchar(512) DEFAULT NULL, `INT_PROP_1` int(11) DEFAULT NULL, `INT_PROP_2` int(11) DEFAULT NULL, `LONG_PROP_1` bigint(20) DEFAULT NULL, `LONG_PROP_2` bigint(20) DEFAULT NULL, `DEC_PROP_1` decimal(13,4) DEFAULT NULL, `DEC_PROP_2` decimal(13,4) DEFAULT NULL, `BOOL_PROP_1` varchar(1) DEFAULT NULL, `BOOL_PROP_2` varchar(1) DEFAULT NULL, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), CONSTRAINT `QRTZ_SIMPROP_TRIGGERS_ibfk_1` FOREIGN KEY (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) REFERENCES `QRTZ_TRIGGERS` (`SCHED_NAME`, 
`TRIGGER_NAME`, `TRIGGER_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_SIMPROP_TRIGGERS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_TRIGGERS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_TRIGGERS`; CREATE TABLE `QRTZ_TRIGGERS` ( `SCHED_NAME` varchar(120) NOT NULL, `TRIGGER_NAME` varchar(200) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, `JOB_NAME` varchar(200) NOT NULL, `JOB_GROUP` varchar(200) NOT NULL, `DESCRIPTION` varchar(250) DEFAULT NULL, `NEXT_FIRE_TIME` bigint(13) DEFAULT NULL, `PREV_FIRE_TIME` bigint(13) DEFAULT NULL, `PRIORITY` int(11) DEFAULT NULL, `TRIGGER_STATE` varchar(16) NOT NULL, `TRIGGER_TYPE` varchar(8) NOT NULL, `START_TIME` bigint(13) NOT NULL, `END_TIME` bigint(13) DEFAULT NULL, `CALENDAR_NAME` varchar(200) DEFAULT NULL, `MISFIRE_INSTR` smallint(2) DEFAULT NULL, `JOB_DATA` blob, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), KEY `IDX_QRTZ_T_J` (`SCHED_NAME`,`JOB_NAME`,`JOB_GROUP`), KEY `IDX_QRTZ_T_JG` (`SCHED_NAME`,`JOB_GROUP`), KEY `IDX_QRTZ_T_C` (`SCHED_NAME`,`CALENDAR_NAME`), KEY `IDX_QRTZ_T_G` (`SCHED_NAME`,`TRIGGER_GROUP`), KEY `IDX_QRTZ_T_STATE` (`SCHED_NAME`,`TRIGGER_STATE`), KEY `IDX_QRTZ_T_N_STATE` (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`,`TRIGGER_STATE`), KEY `IDX_QRTZ_T_N_G_STATE` (`SCHED_NAME`,`TRIGGER_GROUP`,`TRIGGER_STATE`), KEY `IDX_QRTZ_T_NEXT_FIRE_TIME` (`SCHED_NAME`,`NEXT_FIRE_TIME`), KEY `IDX_QRTZ_T_NFT_ST` (`SCHED_NAME`,`TRIGGER_STATE`,`NEXT_FIRE_TIME`), KEY `IDX_QRTZ_T_NFT_MISFIRE` (`SCHED_NAME`,`MISFIRE_INSTR`,`NEXT_FIRE_TIME`), KEY `IDX_QRTZ_T_NFT_ST_MISFIRE` (`SCHED_NAME`,`MISFIRE_INSTR`,`NEXT_FIRE_TIME`,`TRIGGER_STATE`), KEY `IDX_QRTZ_T_NFT_ST_MISFIRE_GRP` (`SCHED_NAME`,`MISFIRE_INSTR`,`NEXT_FIRE_TIME`,`TRIGGER_GROUP`,`TRIGGER_STATE`), CONSTRAINT `QRTZ_TRIGGERS_ibfk_1` FOREIGN KEY (`SCHED_NAME`, `JOB_NAME`, `JOB_GROUP`) REFERENCES `QRTZ_JOB_DETAILS` (`SCHED_NAME`, `JOB_NAME`, `JOB_GROUP`) ) 
ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_TRIGGERS -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_access_token -- ---------------------------- DROP TABLE IF EXISTS `t_ds_access_token`; CREATE TABLE `t_ds_access_token` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `user_id` int(11) DEFAULT NULL COMMENT 'user id', `token` varchar(64) DEFAULT NULL COMMENT 'token', `expire_time` datetime DEFAULT NULL COMMENT 'end time of token ', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_access_token -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_alert -- ---------------------------- DROP TABLE IF EXISTS `t_ds_alert`; CREATE TABLE `t_ds_alert` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `title` varchar(64) DEFAULT NULL COMMENT 'title', `content` text COMMENT 'Message content (can be email, can be SMS. 
Mail is stored in JSON map, and SMS is string)', `alert_status` tinyint(4) DEFAULT '0' COMMENT '0:wait running,1:success,2:failed', `log` text COMMENT 'log', `alertgroup_id` int(11) DEFAULT NULL COMMENT 'alert group id', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_alert -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_alertgroup -- ---------------------------- DROP TABLE IF EXISTS `t_ds_alertgroup`; CREATE TABLE `t_ds_alertgroup`( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `alert_instance_ids` varchar (255) DEFAULT NULL COMMENT 'alert instance ids', `create_user_id` int(11) DEFAULT NULL COMMENT 'create user id', `group_name` varchar(255) DEFAULT NULL COMMENT 'group name', `description` varchar(255) DEFAULT NULL, `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`), UNIQUE KEY `t_ds_alertgroup_name_UN` (`group_name`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_alertgroup -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_command -- ---------------------------- DROP TABLE IF EXISTS `t_ds_command`; CREATE TABLE `t_ds_command` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `command_type` tinyint(4) DEFAULT NULL COMMENT 'Command type: 0 start workflow, 1 start execution from current node, 2 resume fault-tolerant workflow, 3 resume pause process, 4 start execution from failed node, 5 complement, 6 schedule, 7 rerun, 8 pause, 9 stop, 10 resume waiting thread', `process_definition_id` int(11) DEFAULT NULL COMMENT 'process definition id', `command_param` text COMMENT 'json command parameters', `task_depend_type` tinyint(4) DEFAULT NULL 
COMMENT 'Node dependency type: 0 current node, 1 forward, 2 backward', `failure_strategy` tinyint(4) DEFAULT '0' COMMENT 'Failed policy: 0 end, 1 continue', `warning_type` tinyint(4) DEFAULT '0' COMMENT 'Alarm type: 0 is not sent, 1 process is sent successfully, 2 process is sent failed, 3 process is sent successfully and all failures are sent', `warning_group_id` int(11) DEFAULT NULL COMMENT 'warning group', `schedule_time` datetime DEFAULT NULL COMMENT 'schedule time', `start_time` datetime DEFAULT NULL COMMENT 'start time', `executor_id` int(11) DEFAULT NULL COMMENT 'executor id', `update_time` datetime DEFAULT NULL COMMENT 'update time', `process_instance_priority` int(11) DEFAULT NULL COMMENT 'process instance priority: 0 Highest,1 High,2 Medium,3 Low,4 Lowest', `worker_group` varchar(64) COMMENT 'worker group', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_command -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_datasource -- ---------------------------- DROP TABLE IF EXISTS `t_ds_datasource`; CREATE TABLE `t_ds_datasource` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `name` varchar(64) NOT NULL COMMENT 'data source name', `note` varchar(255) DEFAULT NULL COMMENT 'description', `type` tinyint(4) NOT NULL COMMENT 'data source type: 0:mysql,1:postgresql,2:hive,3:spark', `user_id` int(11) NOT NULL COMMENT 'the creator id', `connection_params` text NOT NULL COMMENT 'json connection params', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`), UNIQUE KEY `t_ds_datasource_name_UN` (`name`, `type`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_datasource -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_error_command -- ---------------------------- DROP 
TABLE IF EXISTS `t_ds_error_command`; CREATE TABLE `t_ds_error_command` ( `id` int(11) NOT NULL COMMENT 'key', `command_type` tinyint(4) DEFAULT NULL COMMENT 'command type', `executor_id` int(11) DEFAULT NULL COMMENT 'executor id', `process_definition_id` int(11) DEFAULT NULL COMMENT 'process definition id', `command_param` text COMMENT 'json command parameters', `task_depend_type` tinyint(4) DEFAULT NULL COMMENT 'task depend type', `failure_strategy` tinyint(4) DEFAULT '0' COMMENT 'failure strategy', `warning_type` tinyint(4) DEFAULT '0' COMMENT 'warning type', `warning_group_id` int(11) DEFAULT NULL COMMENT 'warning group id', `schedule_time` datetime DEFAULT NULL COMMENT 'scheduler time', `start_time` datetime DEFAULT NULL COMMENT 'start time', `update_time` datetime DEFAULT NULL COMMENT 'update time', `process_instance_priority` int(11) DEFAULT NULL COMMENT 'process instance priority, 0 Highest,1 High,2 Medium,3 Low,4 Lowest', `worker_group` varchar(64) COMMENT 'worker group', `message` text COMMENT 'message', PRIMARY KEY (`id`) USING BTREE ) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=DYNAMIC; -- ---------------------------- -- Records of t_ds_error_command -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_process_definition -- ---------------------------- DROP TABLE IF EXISTS `t_ds_process_definition`; CREATE TABLE `t_ds_process_definition` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `code` bigint(20) NOT NULL COMMENT 'encoding', `name` varchar(255) DEFAULT NULL COMMENT 'process definition name', `version` int(11) DEFAULT NULL COMMENT 'process definition version', `description` text COMMENT 'description', `project_code` bigint(20) NOT NULL COMMENT 'project code', `release_state` tinyint(4) DEFAULT NULL COMMENT 'process definition release state:0:offline,1:online', `user_id` int(11) DEFAULT NULL COMMENT 'process definition creator id', `global_params` text COMMENT 'global parameters', `flag` 
tinyint(4) DEFAULT NULL COMMENT '0 not available, 1 available', `locations` text COMMENT 'Node location information', `connects` text COMMENT 'Node connection information', `warning_group_id` int(11) DEFAULT NULL COMMENT 'alert group id', `timeout` int(11) DEFAULT '0' COMMENT 'time out, unit: minute', `tenant_id` int(11) NOT NULL DEFAULT '-1' COMMENT 'tenant id', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`), UNIQUE KEY `process_unique` (`name`,`project_code`) USING BTREE, UNIQUE KEY `code_unique` (`code`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_process_definition -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_process_definition_log -- ---------------------------- DROP TABLE IF EXISTS `t_ds_process_definition_log`; CREATE TABLE `t_ds_process_definition_log` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `code` bigint(20) NOT NULL COMMENT 'encoding', `name` varchar(200) DEFAULT NULL COMMENT 'process definition name', `version` int(11) DEFAULT NULL COMMENT 'process definition version', `description` text COMMENT 'description', `project_code` bigint(20) NOT NULL COMMENT 'project code', `release_state` tinyint(4) DEFAULT NULL COMMENT 'process definition release state:0:offline,1:online', `user_id` int(11) DEFAULT NULL COMMENT 'process definition creator id', `global_params` text COMMENT 'global parameters', `flag` tinyint(4) DEFAULT NULL COMMENT '0 not available, 1 available', `locations` text COMMENT 'Node location information', `connects` text COMMENT 'Node connection information', `warning_group_id` int(11) DEFAULT NULL COMMENT 'alert group id', `timeout` int(11) DEFAULT '0' COMMENT 'time out,unit: minute', `tenant_id` int(11) NOT NULL DEFAULT '-1' COMMENT 'tenant id', `operator` int(11) DEFAULT NULL COMMENT 'operator user id', `operate_time` 
datetime DEFAULT NULL COMMENT 'operate time', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_task_definition -- ---------------------------- DROP TABLE IF EXISTS `t_ds_task_definition`; CREATE TABLE `t_ds_task_definition` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `code` bigint(20) NOT NULL COMMENT 'encoding', `name` varchar(200) DEFAULT NULL COMMENT 'task definition name', `version` int(11) DEFAULT NULL COMMENT 'task definition version', `description` text COMMENT 'description', `project_code` bigint(20) NOT NULL COMMENT 'project code', `user_id` int(11) DEFAULT NULL COMMENT 'task definition creator id', `task_type` varchar(50) NOT NULL COMMENT 'task type', `task_params` longtext COMMENT 'job custom parameters', `flag` tinyint(2) DEFAULT NULL COMMENT '0 not available, 1 available', `task_priority` tinyint(4) DEFAULT NULL COMMENT 'job priority', `worker_group` varchar(200) DEFAULT NULL COMMENT 'worker grouping', `fail_retry_times` int(11) DEFAULT NULL COMMENT 'number of failed retries', `fail_retry_interval` int(11) DEFAULT NULL COMMENT 'failed retry interval', `timeout_flag` tinyint(2) DEFAULT '0' COMMENT 'timeout flag:0 close, 1 open', `timeout_notify_strategy` tinyint(4) DEFAULT NULL COMMENT 'timeout notification policy: 0 warning, 1 fail', `timeout` int(11) DEFAULT '0' COMMENT 'timeout length,unit: minute', `delay_time` int(11) DEFAULT '0' COMMENT 'delay execution time,unit: minute', `resource_ids` varchar(255) DEFAULT NULL COMMENT 'resource id, separated by comma', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`,`code`), UNIQUE KEY `task_unique` (`name`,`project_code`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- 
---------------------------- -- Table structure for t_ds_task_definition_log -- ---------------------------- DROP TABLE IF EXISTS `t_ds_task_definition_log`; CREATE TABLE `t_ds_task_definition_log` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `code` bigint(20) NOT NULL COMMENT 'encoding', `name` varchar(200) DEFAULT NULL COMMENT 'task definition name', `version` int(11) DEFAULT NULL COMMENT 'task definition version', `description` text COMMENT 'description', `project_code` bigint(20) NOT NULL COMMENT 'project code', `user_id` int(11) DEFAULT NULL COMMENT 'task definition creator id', `task_type` varchar(50) NOT NULL COMMENT 'task type', `task_params` text COMMENT 'job custom parameters', `flag` tinyint(2) DEFAULT NULL COMMENT '0 not available, 1 available', `task_priority` tinyint(4) DEFAULT NULL COMMENT 'job priority', `worker_group` varchar(200) DEFAULT NULL COMMENT 'worker grouping', `fail_retry_times` int(11) DEFAULT NULL COMMENT 'number of failed retries', `fail_retry_interval` int(11) DEFAULT NULL COMMENT 'failed retry interval', `timeout_flag` tinyint(2) DEFAULT '0' COMMENT 'timeout flag:0 close, 1 open', `timeout_notify_strategy` tinyint(4) DEFAULT NULL COMMENT 'timeout notification policy: 0 warning, 1 fail', `timeout` int(11) DEFAULT '0' COMMENT 'timeout length,unit: minute', `delay_time` int(11) DEFAULT '0' COMMENT 'delay execution time,unit: minute', `resource_ids` varchar(255) DEFAULT NULL COMMENT 'resource id, separated by comma', `operator` int(11) DEFAULT NULL COMMENT 'operator user id', `operate_time` datetime DEFAULT NULL COMMENT 'operate time', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_process_task_relation -- ---------------------------- DROP TABLE IF EXISTS `t_ds_process_task_relation`; CREATE TABLE 
`t_ds_process_task_relation` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `name` varchar(200) DEFAULT NULL COMMENT 'relation name', `process_definition_version` int(11) DEFAULT NULL COMMENT 'process version', `project_code` bigint(20) NOT NULL COMMENT 'project code', `process_definition_code` bigint(20) NOT NULL COMMENT 'process code', `pre_task_code` bigint(20) NOT NULL COMMENT 'pre task code', `pre_task_version` int(11) NOT NULL COMMENT 'pre task version', `post_task_code` bigint(20) NOT NULL COMMENT 'post task code', `post_task_version` int(11) NOT NULL COMMENT 'post task version', `condition_type` tinyint(2) DEFAULT NULL COMMENT 'condition type : 0 none, 1 judge 2 delay', `condition_params` text COMMENT 'condition params(json)', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_process_task_relation_log -- ---------------------------- DROP TABLE IF EXISTS `t_ds_process_task_relation_log`; CREATE TABLE `t_ds_process_task_relation_log` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `name` varchar(200) DEFAULT NULL COMMENT 'relation name', `process_definition_version` int(11) DEFAULT NULL COMMENT 'process version', `project_code` bigint(20) NOT NULL COMMENT 'project code', `process_definition_code` bigint(20) NOT NULL COMMENT 'process code', `pre_task_code` bigint(20) NOT NULL COMMENT 'pre task code', `pre_task_version` int(11) NOT NULL COMMENT 'pre task version', `post_task_code` bigint(20) NOT NULL COMMENT 'post task code', `post_task_version` int(11) NOT NULL COMMENT 'post task version', `condition_type` tinyint(2) DEFAULT NULL COMMENT 'condition type : 0 none, 1 judge 2 delay', `condition_params` text COMMENT 'condition params(json)', `operator` int(11) DEFAULT NULL COMMENT 'operator user id', `operate_time` datetime 
DEFAULT NULL COMMENT 'operate time', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_process_instance -- ---------------------------- DROP TABLE IF EXISTS `t_ds_process_instance`; CREATE TABLE `t_ds_process_instance` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `name` varchar(255) DEFAULT NULL COMMENT 'process instance name', `process_definition_version` int(11) DEFAULT NULL COMMENT 'process definition version', `process_definition_code` bigint(20) not NULL COMMENT 'process definition code', `state` tinyint(4) DEFAULT NULL COMMENT 'process instance Status: 0 commit succeeded, 1 running, 2 prepare to pause, 3 pause, 4 prepare to stop, 5 stop, 6 fail, 7 succeed, 8 need fault tolerance, 9 kill, 10 wait for thread, 11 wait for dependency to complete', `recovery` tinyint(4) DEFAULT NULL COMMENT 'process instance failover flag:0:normal,1:failover instance', `start_time` datetime DEFAULT NULL COMMENT 'process instance start time', `end_time` datetime DEFAULT NULL COMMENT 'process instance end time', `run_times` int(11) DEFAULT NULL COMMENT 'process instance run times', `host` varchar(135) DEFAULT NULL COMMENT 'process instance host', `command_type` tinyint(4) DEFAULT NULL COMMENT 'command type', `command_param` text COMMENT 'json command parameters', `task_depend_type` tinyint(4) DEFAULT NULL COMMENT 'task depend type. 0: only current node,1:before the node,2:later nodes', `max_try_times` tinyint(4) DEFAULT '0' COMMENT 'max try times', `failure_strategy` tinyint(4) DEFAULT '0' COMMENT 'failure strategy. 0:end the process when node failed,1:continue running the other nodes when node failed', `warning_type` tinyint(4) DEFAULT '0' COMMENT 'warning type. 
0:no warning,1:warning if process success,2:warning if process failed,3:warning if success', `warning_group_id` int(11) DEFAULT NULL COMMENT 'warning group id', `schedule_time` datetime DEFAULT NULL COMMENT 'schedule time', `command_start_time` datetime DEFAULT NULL COMMENT 'command start time', `global_params` text COMMENT 'global parameters', `flag` tinyint(4) DEFAULT '1' COMMENT 'flag', `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `is_sub_process` int(11) DEFAULT '0' COMMENT 'flag, whether the process is sub process', `executor_id` int(11) NOT NULL COMMENT 'executor id', `history_cmd` text COMMENT 'history commands of process instance operation', `process_instance_priority` int(11) DEFAULT NULL COMMENT 'process instance priority. 0 Highest,1 High,2 Medium,3 Low,4 Lowest', `worker_group` varchar(64) DEFAULT NULL COMMENT 'worker group id', `timeout` int(11) DEFAULT '0' COMMENT 'time out', `tenant_id` int(11) NOT NULL DEFAULT '-1' COMMENT 'tenant id', `var_pool` longtext COMMENT 'var_pool', PRIMARY KEY (`id`), KEY `process_instance_index` (`process_definition_code`,`id`) USING BTREE, KEY `start_time_index` (`start_time`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_process_instance -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_project -- ---------------------------- DROP TABLE IF EXISTS `t_ds_project`; CREATE TABLE `t_ds_project` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `name` varchar(100) DEFAULT NULL COMMENT 'project name', `code` bigint(20) NOT NULL COMMENT 'encoding', `description` varchar(200) DEFAULT NULL, `user_id` int(11) DEFAULT NULL COMMENT 'creator id', `flag` tinyint(4) DEFAULT '1' COMMENT '0 not available, 1 available', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`), KEY `user_id_index` (`user_id`) 
USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_project -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_queue -- ---------------------------- DROP TABLE IF EXISTS `t_ds_queue`; CREATE TABLE `t_ds_queue` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `queue_name` varchar(64) DEFAULT NULL COMMENT 'queue name', `queue` varchar(64) DEFAULT NULL COMMENT 'yarn queue name', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_queue -- ---------------------------- INSERT INTO `t_ds_queue` VALUES ('1', 'default', 'default', null, null); -- ---------------------------- -- Table structure for t_ds_relation_datasource_user -- ---------------------------- DROP TABLE IF EXISTS `t_ds_relation_datasource_user`; CREATE TABLE `t_ds_relation_datasource_user` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `user_id` int(11) NOT NULL COMMENT 'user id', `datasource_id` int(11) DEFAULT NULL COMMENT 'data source id', `perm` int(11) DEFAULT '1' COMMENT 'limits of authority', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_relation_datasource_user -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_relation_process_instance -- ---------------------------- DROP TABLE IF EXISTS `t_ds_relation_process_instance`; CREATE TABLE `t_ds_relation_process_instance` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `parent_process_instance_id` int(11) DEFAULT NULL COMMENT 'parent process instance id', `parent_task_instance_id` int(11) DEFAULT NULL COMMENT 
'parent process instance id', `process_instance_id` int(11) DEFAULT NULL COMMENT 'child process instance id', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_relation_process_instance -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_relation_project_user -- ---------------------------- DROP TABLE IF EXISTS `t_ds_relation_project_user`; CREATE TABLE `t_ds_relation_project_user` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `user_id` int(11) NOT NULL COMMENT 'user id', `project_id` int(11) DEFAULT NULL COMMENT 'project id', `perm` int(11) DEFAULT '1' COMMENT 'limits of authority', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`), KEY `user_id_index` (`user_id`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_relation_project_user -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_relation_resources_user -- ---------------------------- DROP TABLE IF EXISTS `t_ds_relation_resources_user`; CREATE TABLE `t_ds_relation_resources_user` ( `id` int(11) NOT NULL AUTO_INCREMENT, `user_id` int(11) NOT NULL COMMENT 'user id', `resources_id` int(11) DEFAULT NULL COMMENT 'resource id', `perm` int(11) DEFAULT '1' COMMENT 'limits of authority', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_relation_resources_user -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_relation_udfs_user -- ---------------------------- DROP TABLE IF EXISTS `t_ds_relation_udfs_user`; CREATE TABLE `t_ds_relation_udfs_user` ( `id` int(11) NOT NULL 
AUTO_INCREMENT COMMENT 'key', `user_id` int(11) NOT NULL COMMENT 'userid', `udf_id` int(11) DEFAULT NULL COMMENT 'udf id', `perm` int(11) DEFAULT '1' COMMENT 'limits of authority', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_resources -- ---------------------------- DROP TABLE IF EXISTS `t_ds_resources`; CREATE TABLE `t_ds_resources` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `alias` varchar(64) DEFAULT NULL COMMENT 'alias', `file_name` varchar(64) DEFAULT NULL COMMENT 'file name', `description` varchar(255) DEFAULT NULL, `user_id` int(11) DEFAULT NULL COMMENT 'user id', `type` tinyint(4) DEFAULT NULL COMMENT 'resource type,0:FILE,1:UDF', `size` bigint(20) DEFAULT NULL COMMENT 'resource size', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', `pid` int(11) DEFAULT NULL, `full_name` varchar(64) DEFAULT NULL, `is_directory` tinyint(4) DEFAULT NULL, PRIMARY KEY (`id`), UNIQUE KEY `t_ds_resources_un` (`full_name`,`type`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_resources -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_schedules -- ---------------------------- DROP TABLE IF EXISTS `t_ds_schedules`; CREATE TABLE `t_ds_schedules` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `process_definition_id` int(11) NOT NULL COMMENT 'process definition id', `start_time` datetime NOT NULL COMMENT 'start time', `end_time` datetime NOT NULL COMMENT 'end time', `timezone_id` varchar(40) DEFAULT NULL COMMENT 'timezoneId', `crontab` varchar(255) NOT NULL COMMENT 'crontab description', `failure_strategy` tinyint(4) NOT NULL COMMENT 'failure strategy. 
0:end,1:continue', `user_id` int(11) NOT NULL COMMENT 'user id', `release_state` tinyint(4) NOT NULL COMMENT 'release state. 0:offline,1:online ', `warning_type` tinyint(4) NOT NULL COMMENT 'Alarm type: 0 is not sent, 1 process is sent successfully, 2 process is sent failed, 3 process is sent successfully and all failures are sent', `warning_group_id` int(11) DEFAULT NULL COMMENT 'alert group id', `process_instance_priority` int(11) DEFAULT NULL COMMENT 'process instance priority:0 Highest,1 High,2 Medium,3 Low,4 Lowest', `worker_group` varchar(64) DEFAULT '' COMMENT 'worker group id', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime NOT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_schedules -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_session -- ---------------------------- DROP TABLE IF EXISTS `t_ds_session`; CREATE TABLE `t_ds_session` ( `id` varchar(64) NOT NULL COMMENT 'key', `user_id` int(11) DEFAULT NULL COMMENT 'user id', `ip` varchar(45) DEFAULT NULL COMMENT 'ip', `last_login_time` datetime DEFAULT NULL COMMENT 'last login time', PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_session -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_task_instance -- ---------------------------- DROP TABLE IF EXISTS `t_ds_task_instance`; CREATE TABLE `t_ds_task_instance` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `name` varchar(255) DEFAULT NULL COMMENT 'task name', `task_type` varchar(50) NOT NULL COMMENT 'task type', `task_code` bigint(20) NOT NULL COMMENT 'task definition code', `task_definition_version` int(11) DEFAULT NULL COMMENT 'task definition version', `process_instance_id` int(11) DEFAULT NULL COMMENT 'process instance id', `state` tinyint(4) DEFAULT NULL COMMENT 
'Status: 0 commit succeeded, 1 running, 2 prepare to pause, 3 pause, 4 prepare to stop, 5 stop, 6 fail, 7 succeed, 8 need fault tolerance, 9 kill, 10 wait for thread, 11 wait for dependency to complete', `submit_time` datetime DEFAULT NULL COMMENT 'task submit time', `start_time` datetime DEFAULT NULL COMMENT 'task start time', `end_time` datetime DEFAULT NULL COMMENT 'task end time', `host` varchar(135) DEFAULT NULL COMMENT 'host of task running on', `execute_path` varchar(200) DEFAULT NULL COMMENT 'task execute path in the host', `log_path` varchar(200) DEFAULT NULL COMMENT 'task log path', `alert_flag` tinyint(4) DEFAULT NULL COMMENT 'whether alert', `retry_times` int(4) DEFAULT '0' COMMENT 'task retry times', `pid` int(4) DEFAULT NULL COMMENT 'pid of task', `app_link` text COMMENT 'yarn app id', `task_params` text COMMENT 'job custom parameters', `flag` tinyint(4) DEFAULT '1' COMMENT '0 not available, 1 available', `retry_interval` int(4) DEFAULT NULL COMMENT 'retry interval when task failed ', `max_retry_times` int(2) DEFAULT NULL COMMENT 'max retry times', `task_instance_priority` int(11) DEFAULT NULL COMMENT 'task instance priority:0 Highest,1 High,2 Medium,3 Low,4 Lowest', `worker_group` varchar(64) DEFAULT NULL COMMENT 'worker group id', `executor_id` int(11) DEFAULT NULL, `first_submit_time` datetime DEFAULT NULL COMMENT 'task first submit time', `delay_time` int(4) DEFAULT '0' COMMENT 'task delay execution time', `var_pool` longtext COMMENT 'var_pool', PRIMARY KEY (`id`), KEY `process_instance_id` (`process_instance_id`) USING BTREE, CONSTRAINT `foreign_key_instance_id` FOREIGN KEY (`process_instance_id`) REFERENCES `t_ds_process_instance` (`id`) ON DELETE CASCADE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_task_instance -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_tenant -- ---------------------------- DROP TABLE IF EXISTS `t_ds_tenant`; CREATE 
TABLE `t_ds_tenant` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `tenant_code` varchar(64) DEFAULT NULL COMMENT 'tenant code', `description` varchar(255) DEFAULT NULL, `queue_id` int(11) DEFAULT NULL COMMENT 'queue id', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_tenant -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_udfs -- ---------------------------- DROP TABLE IF EXISTS `t_ds_udfs`; CREATE TABLE `t_ds_udfs` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `user_id` int(11) NOT NULL COMMENT 'user id', `func_name` varchar(100) NOT NULL COMMENT 'UDF function name', `class_name` varchar(255) NOT NULL COMMENT 'class of udf', `type` tinyint(4) NOT NULL COMMENT 'Udf function type', `arg_types` varchar(255) DEFAULT NULL COMMENT 'arguments types', `database` varchar(255) DEFAULT NULL COMMENT 'data base', `description` varchar(255) DEFAULT NULL, `resource_id` int(11) NOT NULL COMMENT 'resource id', `resource_name` varchar(255) NOT NULL COMMENT 'resource name', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime NOT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_udfs -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_user -- ---------------------------- DROP TABLE IF EXISTS `t_ds_user`; CREATE TABLE `t_ds_user` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'user id', `user_name` varchar(64) DEFAULT NULL COMMENT 'user name', `user_password` varchar(64) DEFAULT NULL COMMENT 'user password', `user_type` tinyint(4) DEFAULT NULL COMMENT 'user type, 0:administrator,1:ordinary user', `email` varchar(64) DEFAULT NULL COMMENT 'email', `phone` varchar(11) 
DEFAULT NULL COMMENT 'phone', `tenant_id` int(11) DEFAULT NULL COMMENT 'tenant id', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', `queue` varchar(64) DEFAULT NULL COMMENT 'queue', `state` int(1) DEFAULT 1 COMMENT 'state 0:disable 1:enable', PRIMARY KEY (`id`), UNIQUE KEY `user_name_unique` (`user_name`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_user -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_worker_group -- ---------------------------- DROP TABLE IF EXISTS `t_ds_worker_group`; CREATE TABLE `t_ds_worker_group` ( `id` bigint(11) NOT NULL AUTO_INCREMENT COMMENT 'id', `name` varchar(255) NOT NULL COMMENT 'worker group name', `addr_list` text NULL DEFAULT NULL COMMENT 'worker addr list. split by [,]', `create_time` datetime NULL DEFAULT NULL COMMENT 'create time', `update_time` datetime NULL DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`), UNIQUE KEY `name_unique` (`name`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_worker_group -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_version -- ---------------------------- DROP TABLE IF EXISTS `t_ds_version`; CREATE TABLE `t_ds_version` ( `id` int(11) NOT NULL AUTO_INCREMENT, `version` varchar(200) NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `version_UNIQUE` (`version`) ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8 COMMENT='version'; -- ---------------------------- -- Records of t_ds_version -- ---------------------------- INSERT INTO `t_ds_version` VALUES ('1', '1.4.0'); -- ---------------------------- -- Records of t_ds_alertgroup -- ---------------------------- INSERT INTO `t_ds_alertgroup`(alert_instance_ids, create_user_id, group_name, description, create_time, update_time) VALUES ("1,2", 1, 'default admin warning group', 
'default admin warning group', '2018-11-29 10:20:39', '2018-11-29 10:20:39'); -- ---------------------------- -- Records of t_ds_user -- ---------------------------- INSERT INTO `t_ds_user` VALUES ('1', 'admin', '7ad2410b2f4c074479a8937a28a22b8f', '0', 'xxx@qq.com', '', '0', '2018-03-27 15:48:50', '2018-10-24 17:40:22', null, 1); -- ---------------------------- -- Table structure for t_ds_plugin_define -- ---------------------------- SET sql_mode=(SELECT REPLACE(@@sql_mode,'ONLY_FULL_GROUP_BY','')); DROP TABLE IF EXISTS `t_ds_plugin_define`; CREATE TABLE `t_ds_plugin_define` ( `id` int NOT NULL AUTO_INCREMENT, `plugin_name` varchar(100) NOT NULL COMMENT 'the name of plugin eg: email', `plugin_type` varchar(100) NOT NULL COMMENT 'plugin type . alert=alert plugin, job=job plugin', `plugin_params` text COMMENT 'plugin params', `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE KEY `t_ds_plugin_define_UN` (`plugin_name`,`plugin_type`) ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_alert_plugin_instance -- ---------------------------- DROP TABLE IF EXISTS `t_ds_alert_plugin_instance`; CREATE TABLE `t_ds_alert_plugin_instance` ( `id` int NOT NULL AUTO_INCREMENT, `plugin_define_id` int NOT NULL, `plugin_instance_params` text COMMENT 'plugin instance params. Also contain the params value which user input in web ui.', `create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP, `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `instance_name` varchar(200) DEFAULT NULL COMMENT 'alert instance name', PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,904
[Bug][upgrade]dev branch upgrade mysql sql script error
**Describe the bug** ``` IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_datasource' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='t_ds_datasource_name_UN') ``` Create index type statement error **Which version of Dolphin Scheduler:** -[dev]
https://github.com/apache/dolphinscheduler/issues/5904
https://github.com/apache/dolphinscheduler/pull/5821
8db1b042b2523bd3b6ed363303283308ea14b2cc
0075523fdd7f170a626634ebe41c0cef4c36feaf
"2021-07-28T04:34:10Z"
java
"2021-07-28T09:22:39Z"
sql/dolphinscheduler_postgre.sql
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ DROP TABLE IF EXISTS QRTZ_FIRED_TRIGGERS; DROP TABLE IF EXISTS QRTZ_PAUSED_TRIGGER_GRPS; DROP TABLE IF EXISTS QRTZ_SCHEDULER_STATE; DROP TABLE IF EXISTS QRTZ_LOCKS; DROP TABLE IF EXISTS QRTZ_SIMPLE_TRIGGERS; DROP TABLE IF EXISTS QRTZ_SIMPROP_TRIGGERS; DROP TABLE IF EXISTS QRTZ_CRON_TRIGGERS; DROP TABLE IF EXISTS QRTZ_BLOB_TRIGGERS; DROP TABLE IF EXISTS QRTZ_TRIGGERS; DROP TABLE IF EXISTS QRTZ_JOB_DETAILS; DROP TABLE IF EXISTS QRTZ_CALENDARS; CREATE TABLE QRTZ_JOB_DETAILS( SCHED_NAME character varying(120) NOT NULL, JOB_NAME character varying(200) NOT NULL, JOB_GROUP character varying(200) NOT NULL, DESCRIPTION character varying(250) NULL, JOB_CLASS_NAME character varying(250) NOT NULL, IS_DURABLE boolean NOT NULL, IS_NONCONCURRENT boolean NOT NULL, IS_UPDATE_DATA boolean NOT NULL, REQUESTS_RECOVERY boolean NOT NULL, JOB_DATA bytea NULL); alter table QRTZ_JOB_DETAILS add primary key(SCHED_NAME,JOB_NAME,JOB_GROUP); CREATE TABLE QRTZ_TRIGGERS ( SCHED_NAME character varying(120) NOT NULL, TRIGGER_NAME character varying(200) NOT NULL, TRIGGER_GROUP character varying(200) NOT NULL, JOB_NAME character varying(200) NOT NULL, JOB_GROUP character varying(200) NOT NULL, DESCRIPTION character varying(250) NULL, 
NEXT_FIRE_TIME BIGINT NULL, PREV_FIRE_TIME BIGINT NULL, PRIORITY INTEGER NULL, TRIGGER_STATE character varying(16) NOT NULL, TRIGGER_TYPE character varying(8) NOT NULL, START_TIME BIGINT NOT NULL, END_TIME BIGINT NULL, CALENDAR_NAME character varying(200) NULL, MISFIRE_INSTR SMALLINT NULL, JOB_DATA bytea NULL) ; alter table QRTZ_TRIGGERS add primary key(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP); CREATE TABLE QRTZ_SIMPLE_TRIGGERS ( SCHED_NAME character varying(120) NOT NULL, TRIGGER_NAME character varying(200) NOT NULL, TRIGGER_GROUP character varying(200) NOT NULL, REPEAT_COUNT BIGINT NOT NULL, REPEAT_INTERVAL BIGINT NOT NULL, TIMES_TRIGGERED BIGINT NOT NULL) ; alter table QRTZ_SIMPLE_TRIGGERS add primary key(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP); CREATE TABLE QRTZ_CRON_TRIGGERS ( SCHED_NAME character varying(120) NOT NULL, TRIGGER_NAME character varying(200) NOT NULL, TRIGGER_GROUP character varying(200) NOT NULL, CRON_EXPRESSION character varying(120) NOT NULL, TIME_ZONE_ID character varying(80)) ; alter table QRTZ_CRON_TRIGGERS add primary key(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP); CREATE TABLE QRTZ_SIMPROP_TRIGGERS ( SCHED_NAME character varying(120) NOT NULL, TRIGGER_NAME character varying(200) NOT NULL, TRIGGER_GROUP character varying(200) NOT NULL, STR_PROP_1 character varying(512) NULL, STR_PROP_2 character varying(512) NULL, STR_PROP_3 character varying(512) NULL, INT_PROP_1 INT NULL, INT_PROP_2 INT NULL, LONG_PROP_1 BIGINT NULL, LONG_PROP_2 BIGINT NULL, DEC_PROP_1 NUMERIC(13,4) NULL, DEC_PROP_2 NUMERIC(13,4) NULL, BOOL_PROP_1 boolean NULL, BOOL_PROP_2 boolean NULL) ; alter table QRTZ_SIMPROP_TRIGGERS add primary key(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP); CREATE TABLE QRTZ_BLOB_TRIGGERS ( SCHED_NAME character varying(120) NOT NULL, TRIGGER_NAME character varying(200) NOT NULL, TRIGGER_GROUP character varying(200) NOT NULL, BLOB_DATA bytea NULL) ; alter table QRTZ_BLOB_TRIGGERS add primary key(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP); CREATE TABLE 
QRTZ_CALENDARS ( SCHED_NAME character varying(120) NOT NULL, CALENDAR_NAME character varying(200) NOT NULL, CALENDAR bytea NOT NULL) ; alter table QRTZ_CALENDARS add primary key(SCHED_NAME,CALENDAR_NAME); CREATE TABLE QRTZ_PAUSED_TRIGGER_GRPS ( SCHED_NAME character varying(120) NOT NULL, TRIGGER_GROUP character varying(200) NOT NULL) ; alter table QRTZ_PAUSED_TRIGGER_GRPS add primary key(SCHED_NAME,TRIGGER_GROUP); CREATE TABLE QRTZ_FIRED_TRIGGERS ( SCHED_NAME character varying(120) NOT NULL, ENTRY_ID character varying(200) NOT NULL, TRIGGER_NAME character varying(200) NOT NULL, TRIGGER_GROUP character varying(200) NOT NULL, INSTANCE_NAME character varying(200) NOT NULL, FIRED_TIME BIGINT NOT NULL, SCHED_TIME BIGINT NOT NULL, PRIORITY INTEGER NOT NULL, STATE character varying(16) NOT NULL, JOB_NAME character varying(200) NULL, JOB_GROUP character varying(200) NULL, IS_NONCONCURRENT boolean NULL, REQUESTS_RECOVERY boolean NULL) ; alter table QRTZ_FIRED_TRIGGERS add primary key(SCHED_NAME,ENTRY_ID); CREATE TABLE QRTZ_SCHEDULER_STATE ( SCHED_NAME character varying(120) NOT NULL, INSTANCE_NAME character varying(200) NOT NULL, LAST_CHECKIN_TIME BIGINT NOT NULL, CHECKIN_INTERVAL BIGINT NOT NULL) ; alter table QRTZ_SCHEDULER_STATE add primary key(SCHED_NAME,INSTANCE_NAME); CREATE TABLE QRTZ_LOCKS ( SCHED_NAME character varying(120) NOT NULL, LOCK_NAME character varying(40) NOT NULL) ; alter table QRTZ_LOCKS add primary key(SCHED_NAME,LOCK_NAME); CREATE INDEX IDX_QRTZ_J_REQ_RECOVERY ON QRTZ_JOB_DETAILS(SCHED_NAME,REQUESTS_RECOVERY); CREATE INDEX IDX_QRTZ_J_GRP ON QRTZ_JOB_DETAILS(SCHED_NAME,JOB_GROUP); CREATE INDEX IDX_QRTZ_T_J ON QRTZ_TRIGGERS(SCHED_NAME,JOB_NAME,JOB_GROUP); CREATE INDEX IDX_QRTZ_T_JG ON QRTZ_TRIGGERS(SCHED_NAME,JOB_GROUP); CREATE INDEX IDX_QRTZ_T_C ON QRTZ_TRIGGERS(SCHED_NAME,CALENDAR_NAME); CREATE INDEX IDX_QRTZ_T_G ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_GROUP); CREATE INDEX IDX_QRTZ_T_STATE ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_STATE); CREATE INDEX 
IDX_QRTZ_T_N_STATE ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP,TRIGGER_STATE); CREATE INDEX IDX_QRTZ_T_N_G_STATE ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_GROUP,TRIGGER_STATE); CREATE INDEX IDX_QRTZ_T_NEXT_FIRE_TIME ON QRTZ_TRIGGERS(SCHED_NAME,NEXT_FIRE_TIME); CREATE INDEX IDX_QRTZ_T_NFT_ST ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_STATE,NEXT_FIRE_TIME); CREATE INDEX IDX_QRTZ_T_NFT_MISFIRE ON QRTZ_TRIGGERS(SCHED_NAME,MISFIRE_INSTR,NEXT_FIRE_TIME); CREATE INDEX IDX_QRTZ_T_NFT_ST_MISFIRE ON QRTZ_TRIGGERS(SCHED_NAME,MISFIRE_INSTR,NEXT_FIRE_TIME,TRIGGER_STATE); CREATE INDEX IDX_QRTZ_T_NFT_ST_MISFIRE_GRP ON QRTZ_TRIGGERS(SCHED_NAME,MISFIRE_INSTR,NEXT_FIRE_TIME,TRIGGER_GROUP,TRIGGER_STATE); CREATE INDEX IDX_QRTZ_FT_TRIG_INST_NAME ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,INSTANCE_NAME); CREATE INDEX IDX_QRTZ_FT_INST_JOB_REQ_RCVRY ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,INSTANCE_NAME,REQUESTS_RECOVERY); CREATE INDEX IDX_QRTZ_FT_J_G ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,JOB_NAME,JOB_GROUP); CREATE INDEX IDX_QRTZ_FT_JG ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,JOB_GROUP); CREATE INDEX IDX_QRTZ_FT_T_G ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP); CREATE INDEX IDX_QRTZ_FT_TG ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,TRIGGER_GROUP); -- -- Table structure for table t_ds_access_token -- DROP TABLE IF EXISTS t_ds_access_token; CREATE TABLE t_ds_access_token ( id int NOT NULL , user_id int DEFAULT NULL , token varchar(64) DEFAULT NULL , expire_time timestamp DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; -- -- Table structure for table t_ds_alert -- DROP TABLE IF EXISTS t_ds_alert; CREATE TABLE t_ds_alert ( id int NOT NULL , title varchar(64) DEFAULT NULL , content text , alert_status int DEFAULT '0' , log text , alertgroup_id int DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; -- -- Table structure for table t_ds_alertgroup -- DROP TABLE IF EXISTS t_ds_alertgroup; CREATE 
TABLE t_ds_alertgroup( id int NOT NULL, alert_instance_ids varchar (255) DEFAULT NULL, create_user_id int4 DEFAULT NULL, group_name varchar(255) DEFAULT NULL, description varchar(255) DEFAULT NULL, create_time timestamp DEFAULT NULL, update_time timestamp DEFAULT NULL, PRIMARY KEY (id), CONSTRAINT t_ds_alertgroup_name_UN UNIQUE (group_name) ) ; -- -- Table structure for table t_ds_command -- DROP TABLE IF EXISTS t_ds_command; CREATE TABLE t_ds_command ( id int NOT NULL , command_type int DEFAULT NULL , process_definition_id int DEFAULT NULL , command_param text , task_depend_type int DEFAULT NULL , failure_strategy int DEFAULT '0' , warning_type int DEFAULT '0' , warning_group_id int DEFAULT NULL , schedule_time timestamp DEFAULT NULL , start_time timestamp DEFAULT NULL , executor_id int DEFAULT NULL , update_time timestamp DEFAULT NULL , process_instance_priority int DEFAULT NULL , worker_group varchar(64), PRIMARY KEY (id) ) ; -- -- Table structure for table t_ds_datasource -- DROP TABLE IF EXISTS t_ds_datasource; CREATE TABLE t_ds_datasource ( id int NOT NULL , name varchar(64) NOT NULL , note varchar(255) DEFAULT NULL , type int NOT NULL , user_id int NOT NULL , connection_params text NOT NULL , create_time timestamp NOT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id), CONSTRAINT t_ds_datasource_name_UN UNIQUE (name, type) ) ; -- -- Table structure for table t_ds_error_command -- DROP TABLE IF EXISTS t_ds_error_command; CREATE TABLE t_ds_error_command ( id int NOT NULL , command_type int DEFAULT NULL , executor_id int DEFAULT NULL , process_definition_id int DEFAULT NULL , command_param text , task_depend_type int DEFAULT NULL , failure_strategy int DEFAULT '0' , warning_type int DEFAULT '0' , warning_group_id int DEFAULT NULL , schedule_time timestamp DEFAULT NULL , start_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , process_instance_priority int DEFAULT NULL , worker_group varchar(64), message text , PRIMARY KEY (id) ); -- -- 
Table structure for table t_ds_master_server -- -- -- Table structure for table t_ds_process_definition -- DROP TABLE IF EXISTS t_ds_process_definition; CREATE TABLE t_ds_process_definition ( id int NOT NULL , code bigint NOT NULL, name varchar(255) DEFAULT NULL , version int DEFAULT NULL , description text , project_code bigint DEFAULT NULL , release_state int DEFAULT NULL , user_id int DEFAULT NULL , global_params text , locations text , connects text , warning_group_id int DEFAULT NULL , flag int DEFAULT NULL , timeout int DEFAULT '0' , tenant_id int DEFAULT '-1' , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) , CONSTRAINT process_definition_unique UNIQUE (name, project_code) , CONSTRAINT code_unique UNIQUE (code) ) ; create index process_definition_index on t_ds_process_definition (code,id); DROP TABLE IF EXISTS t_ds_process_definition_log; CREATE TABLE t_ds_process_definition_log ( id int NOT NULL , code bigint NOT NULL, name varchar(255) DEFAULT NULL , version int DEFAULT NULL , description text , project_code bigint DEFAULT NULL , release_state int DEFAULT NULL , user_id int DEFAULT NULL , global_params text , locations text , connects text , warning_group_id int DEFAULT NULL , flag int DEFAULT NULL , timeout int DEFAULT '0' , tenant_id int DEFAULT '-1' , operator int DEFAULT NULL , operate_time timestamp DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; DROP TABLE IF EXISTS t_ds_task_definition; CREATE TABLE t_ds_task_definition ( id int NOT NULL , code bigint NOT NULL, name varchar(255) DEFAULT NULL , version int DEFAULT NULL , description text , project_code bigint DEFAULT NULL , user_id int DEFAULT NULL , task_type varchar(50) DEFAULT NULL , task_params text , flag int DEFAULT NULL , task_priority int DEFAULT NULL , worker_group varchar(255) DEFAULT NULL , fail_retry_times int DEFAULT NULL , fail_retry_interval int DEFAULT NULL , timeout_flag int 
DEFAULT NULL , timeout_notify_strategy int DEFAULT NULL , timeout int DEFAULT '0' , delay_time int DEFAULT '0' , resource_ids varchar(255) DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) , CONSTRAINT task_definition_unique UNIQUE (name, project_code) ) ; create index task_definition_index on t_ds_task_definition (project_code,id); DROP TABLE IF EXISTS t_ds_task_definition_log; CREATE TABLE t_ds_task_definition_log ( id int NOT NULL , code bigint NOT NULL, name varchar(255) DEFAULT NULL , version int DEFAULT NULL , description text , project_code bigint DEFAULT NULL , user_id int DEFAULT NULL , task_type varchar(50) DEFAULT NULL , task_params text , flag int DEFAULT NULL , task_priority int DEFAULT NULL , worker_group varchar(255) DEFAULT NULL , fail_retry_times int DEFAULT NULL , fail_retry_interval int DEFAULT NULL , timeout_flag int DEFAULT NULL , timeout_notify_strategy int DEFAULT NULL , timeout int DEFAULT '0' , delay_time int DEFAULT '0' , resource_ids varchar(255) DEFAULT NULL , operator int DEFAULT NULL , operate_time timestamp DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; DROP TABLE IF EXISTS t_ds_process_task_relation; CREATE TABLE t_ds_process_task_relation ( id int NOT NULL , name varchar(255) DEFAULT NULL , process_definition_version int DEFAULT NULL , project_code bigint DEFAULT NULL , process_definition_code bigint DEFAULT NULL , pre_task_code bigint DEFAULT NULL , pre_task_version int DEFAULT '0' , post_task_code bigint DEFAULT NULL , post_task_version int DEFAULT '0' , condition_type int DEFAULT NULL , condition_params text , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; DROP TABLE IF EXISTS t_ds_process_task_relation_log; CREATE TABLE t_ds_process_task_relation_log ( id int NOT NULL , name varchar(255) DEFAULT NULL , process_definition_version int DEFAULT NULL , project_code 
bigint DEFAULT NULL , process_definition_code bigint DEFAULT NULL , pre_task_code bigint DEFAULT NULL , pre_task_version int DEFAULT '0' , post_task_code bigint DEFAULT NULL , post_task_version int DEFAULT '0' , condition_type int DEFAULT NULL , condition_params text , operator int DEFAULT NULL , operate_time timestamp DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; -- -- Table structure for table t_ds_process_instance -- DROP TABLE IF EXISTS t_ds_process_instance; CREATE TABLE t_ds_process_instance ( id int NOT NULL , name varchar(255) DEFAULT NULL , process_definition_version int DEFAULT NULL , process_definition_code bigint DEFAULT NULL , state int DEFAULT NULL , recovery int DEFAULT NULL , start_time timestamp DEFAULT NULL , end_time timestamp DEFAULT NULL , run_times int DEFAULT NULL , host varchar(135) DEFAULT NULL , command_type int DEFAULT NULL , command_param text , task_depend_type int DEFAULT NULL , max_try_times int DEFAULT '0' , failure_strategy int DEFAULT '0' , warning_type int DEFAULT '0' , warning_group_id int DEFAULT NULL , schedule_time timestamp DEFAULT NULL , command_start_time timestamp DEFAULT NULL , global_params text , process_instance_json text , flag int DEFAULT '1' , update_time timestamp NULL , is_sub_process int DEFAULT '0' , executor_id int NOT NULL , history_cmd text , dependence_schedule_times text , process_instance_priority int DEFAULT NULL , worker_group varchar(64) , timeout int DEFAULT '0' , tenant_id int NOT NULL DEFAULT '-1' , var_pool text , PRIMARY KEY (id) ) ; create index process_instance_index on t_ds_process_instance (process_definition_code,id); create index start_time_index on t_ds_process_instance (start_time); -- -- Table structure for table t_ds_project -- DROP TABLE IF EXISTS t_ds_project; CREATE TABLE t_ds_project ( id int NOT NULL , name varchar(100) DEFAULT NULL , code bigint NOT NULL, description varchar(200) DEFAULT NULL , user_id int DEFAULT NULL 
, flag int DEFAULT '1' , create_time timestamp DEFAULT CURRENT_TIMESTAMP , update_time timestamp DEFAULT CURRENT_TIMESTAMP , PRIMARY KEY (id) ) ; create index user_id_index on t_ds_project (user_id); -- -- Table structure for table t_ds_queue -- DROP TABLE IF EXISTS t_ds_queue; CREATE TABLE t_ds_queue ( id int NOT NULL , queue_name varchar(64) DEFAULT NULL , queue varchar(64) DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ); -- -- Table structure for table t_ds_relation_datasource_user -- DROP TABLE IF EXISTS t_ds_relation_datasource_user; CREATE TABLE t_ds_relation_datasource_user ( id int NOT NULL , user_id int NOT NULL , datasource_id int DEFAULT NULL , perm int DEFAULT '1' , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; ; -- -- Table structure for table t_ds_relation_process_instance -- DROP TABLE IF EXISTS t_ds_relation_process_instance; CREATE TABLE t_ds_relation_process_instance ( id int NOT NULL , parent_process_instance_id int DEFAULT NULL , parent_task_instance_id int DEFAULT NULL , process_instance_id int DEFAULT NULL , PRIMARY KEY (id) ) ; -- -- Table structure for table t_ds_relation_project_user -- DROP TABLE IF EXISTS t_ds_relation_project_user; CREATE TABLE t_ds_relation_project_user ( id int NOT NULL , user_id int NOT NULL , project_id int DEFAULT NULL , perm int DEFAULT '1' , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; create index relation_project_user_id_index on t_ds_relation_project_user (user_id); -- -- Table structure for table t_ds_relation_resources_user -- DROP TABLE IF EXISTS t_ds_relation_resources_user; CREATE TABLE t_ds_relation_resources_user ( id int NOT NULL , user_id int NOT NULL , resources_id int DEFAULT NULL , perm int DEFAULT '1' , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; -- -- Table structure for table 
t_ds_relation_udfs_user -- DROP TABLE IF EXISTS t_ds_relation_udfs_user; CREATE TABLE t_ds_relation_udfs_user ( id int NOT NULL , user_id int NOT NULL , udf_id int DEFAULT NULL , perm int DEFAULT '1' , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; ; -- -- Table structure for table t_ds_resources -- DROP TABLE IF EXISTS t_ds_resources; CREATE TABLE t_ds_resources ( id int NOT NULL , alias varchar(64) DEFAULT NULL , file_name varchar(64) DEFAULT NULL , description varchar(255) DEFAULT NULL , user_id int DEFAULT NULL , type int DEFAULT NULL , size bigint DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , pid int, full_name varchar(64), is_directory int, PRIMARY KEY (id), CONSTRAINT t_ds_resources_un UNIQUE (full_name, type) ) ; -- -- Table structure for table t_ds_schedules -- DROP TABLE IF EXISTS t_ds_schedules; CREATE TABLE t_ds_schedules ( id int NOT NULL , process_definition_id int NOT NULL , start_time timestamp NOT NULL , end_time timestamp NOT NULL , timezone_id varchar(40) default NULL , crontab varchar(255) NOT NULL , failure_strategy int NOT NULL , user_id int NOT NULL , release_state int NOT NULL , warning_type int NOT NULL , warning_group_id int DEFAULT NULL , process_instance_priority int DEFAULT NULL , worker_group varchar(64), create_time timestamp NOT NULL , update_time timestamp NOT NULL , PRIMARY KEY (id) ); -- -- Table structure for table t_ds_session -- DROP TABLE IF EXISTS t_ds_session; CREATE TABLE t_ds_session ( id varchar(64) NOT NULL , user_id int DEFAULT NULL , ip varchar(45) DEFAULT NULL , last_login_time timestamp DEFAULT NULL , PRIMARY KEY (id) ); -- -- Table structure for table t_ds_task_instance -- DROP TABLE IF EXISTS t_ds_task_instance; CREATE TABLE t_ds_task_instance ( id int NOT NULL , name varchar(255) DEFAULT NULL , task_type varchar(50) DEFAULT NULL , task_code bigint NOT NULL, task_definition_version int DEFAULT NULL , process_instance_id int 
DEFAULT NULL , state int DEFAULT NULL , submit_time timestamp DEFAULT NULL , start_time timestamp DEFAULT NULL , end_time timestamp DEFAULT NULL , host varchar(135) DEFAULT NULL , execute_path varchar(200) DEFAULT NULL , log_path varchar(200) DEFAULT NULL , alert_flag int DEFAULT NULL , retry_times int DEFAULT '0' , pid int DEFAULT NULL , app_link text , task_params text , flag int DEFAULT '1' , retry_interval int DEFAULT NULL , max_retry_times int DEFAULT NULL , task_instance_priority int DEFAULT NULL , worker_group varchar(64), executor_id int DEFAULT NULL , first_submit_time timestamp DEFAULT NULL , delay_time int DEFAULT '0' , var_pool text , PRIMARY KEY (id), CONSTRAINT foreign_key_instance_id FOREIGN KEY(process_instance_id) REFERENCES t_ds_process_instance(id) ON DELETE CASCADE ) ; -- -- Table structure for table t_ds_tenant -- DROP TABLE IF EXISTS t_ds_tenant; CREATE TABLE t_ds_tenant ( id int NOT NULL , tenant_code varchar(64) DEFAULT NULL , description varchar(255) DEFAULT NULL , queue_id int DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; -- -- Table structure for table t_ds_udfs -- DROP TABLE IF EXISTS t_ds_udfs; CREATE TABLE t_ds_udfs ( id int NOT NULL , user_id int NOT NULL , func_name varchar(100) NOT NULL , class_name varchar(255) NOT NULL , type int NOT NULL , arg_types varchar(255) DEFAULT NULL , database varchar(255) DEFAULT NULL , description varchar(255) DEFAULT NULL , resource_id int NOT NULL , resource_name varchar(255) NOT NULL , create_time timestamp NOT NULL , update_time timestamp NOT NULL , PRIMARY KEY (id) ) ; -- -- Table structure for table t_ds_user -- DROP TABLE IF EXISTS t_ds_user; CREATE TABLE t_ds_user ( id int NOT NULL , user_name varchar(64) DEFAULT NULL , user_password varchar(64) DEFAULT NULL , user_type int DEFAULT NULL , email varchar(64) DEFAULT NULL , phone varchar(11) DEFAULT NULL , tenant_id int DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time 
timestamp DEFAULT NULL , queue varchar(64) DEFAULT NULL , state int DEFAULT 1 , PRIMARY KEY (id) ); comment on column t_ds_user.state is 'state 0:disable 1:enable'; -- -- Table structure for table t_ds_version -- DROP TABLE IF EXISTS t_ds_version; CREATE TABLE t_ds_version ( id int NOT NULL , version varchar(200) NOT NULL, PRIMARY KEY (id) ) ; create index version_index on t_ds_version(version); -- -- Table structure for table t_ds_worker_group -- DROP TABLE IF EXISTS t_ds_worker_group; CREATE TABLE t_ds_worker_group ( id bigint NOT NULL , name varchar(255) NOT NULL , addr_list text DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) , CONSTRAINT name_unique UNIQUE (name) ) ; -- -- Table structure for table t_ds_worker_server -- DROP TABLE IF EXISTS t_ds_worker_server; CREATE TABLE t_ds_worker_server ( id int NOT NULL , host varchar(45) DEFAULT NULL , port int DEFAULT NULL , zk_directory varchar(64) DEFAULT NULL , res_info varchar(255) DEFAULT NULL , create_time timestamp DEFAULT NULL , last_heartbeat_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; DROP SEQUENCE IF EXISTS t_ds_access_token_id_sequence; CREATE SEQUENCE t_ds_access_token_id_sequence; ALTER TABLE t_ds_access_token ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_access_token_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_alert_id_sequence; CREATE SEQUENCE t_ds_alert_id_sequence; ALTER TABLE t_ds_alert ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_alert_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_alertgroup_id_sequence; CREATE SEQUENCE t_ds_alertgroup_id_sequence; ALTER TABLE t_ds_alertgroup ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_alertgroup_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_command_id_sequence; CREATE SEQUENCE t_ds_command_id_sequence; ALTER TABLE t_ds_command ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_command_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_datasource_id_sequence; CREATE SEQUENCE t_ds_datasource_id_sequence; ALTER TABLE t_ds_datasource 
ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_datasource_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_process_definition_id_sequence; CREATE SEQUENCE t_ds_process_definition_id_sequence; ALTER TABLE t_ds_process_definition ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_definition_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_process_definition_log_id_sequence; CREATE SEQUENCE t_ds_process_definition_log_id_sequence; ALTER TABLE t_ds_process_definition_log ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_definition_log_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_task_definition_id_sequence; CREATE SEQUENCE t_ds_task_definition_id_sequence; ALTER TABLE t_ds_task_definition ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_task_definition_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_task_definition_log_id_sequence; CREATE SEQUENCE t_ds_task_definition_log_id_sequence; ALTER TABLE t_ds_task_definition_log ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_task_definition_log_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_process_task_relation_id_sequence; CREATE SEQUENCE t_ds_process_task_relation_id_sequence; ALTER TABLE t_ds_process_task_relation ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_task_relation_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_process_task_relation_log_id_sequence; CREATE SEQUENCE t_ds_process_task_relation_log_id_sequence; ALTER TABLE t_ds_process_task_relation_log ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_task_relation_log_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_process_instance_id_sequence; CREATE SEQUENCE t_ds_process_instance_id_sequence; ALTER TABLE t_ds_process_instance ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_instance_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_project_id_sequence; CREATE SEQUENCE t_ds_project_id_sequence; ALTER TABLE t_ds_project ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_project_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_queue_id_sequence; CREATE SEQUENCE t_ds_queue_id_sequence; ALTER TABLE t_ds_queue ALTER COLUMN id 
SET DEFAULT NEXTVAL('t_ds_queue_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_relation_datasource_user_id_sequence; CREATE SEQUENCE t_ds_relation_datasource_user_id_sequence; ALTER TABLE t_ds_relation_datasource_user ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_datasource_user_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_relation_process_instance_id_sequence; CREATE SEQUENCE t_ds_relation_process_instance_id_sequence; ALTER TABLE t_ds_relation_process_instance ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_process_instance_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_relation_project_user_id_sequence; CREATE SEQUENCE t_ds_relation_project_user_id_sequence; ALTER TABLE t_ds_relation_project_user ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_project_user_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_relation_resources_user_id_sequence; CREATE SEQUENCE t_ds_relation_resources_user_id_sequence; ALTER TABLE t_ds_relation_resources_user ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_resources_user_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_relation_udfs_user_id_sequence; CREATE SEQUENCE t_ds_relation_udfs_user_id_sequence; ALTER TABLE t_ds_relation_udfs_user ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_udfs_user_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_resources_id_sequence; CREATE SEQUENCE t_ds_resources_id_sequence; ALTER TABLE t_ds_resources ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_resources_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_schedules_id_sequence; CREATE SEQUENCE t_ds_schedules_id_sequence; ALTER TABLE t_ds_schedules ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_schedules_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_task_instance_id_sequence; CREATE SEQUENCE t_ds_task_instance_id_sequence; ALTER TABLE t_ds_task_instance ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_task_instance_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_tenant_id_sequence; CREATE SEQUENCE t_ds_tenant_id_sequence; ALTER TABLE t_ds_tenant ALTER COLUMN id SET DEFAULT 
NEXTVAL('t_ds_tenant_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_udfs_id_sequence; CREATE SEQUENCE t_ds_udfs_id_sequence; ALTER TABLE t_ds_udfs ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_udfs_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_user_id_sequence; CREATE SEQUENCE t_ds_user_id_sequence; ALTER TABLE t_ds_user ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_user_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_version_id_sequence; CREATE SEQUENCE t_ds_version_id_sequence; ALTER TABLE t_ds_version ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_version_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_worker_group_id_sequence; CREATE SEQUENCE t_ds_worker_group_id_sequence; ALTER TABLE t_ds_worker_group ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_worker_group_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_worker_server_id_sequence; CREATE SEQUENCE t_ds_worker_server_id_sequence; ALTER TABLE t_ds_worker_server ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_worker_server_id_sequence'); -- Records of t_ds_user?user : admin , password : dolphinscheduler123 INSERT INTO t_ds_user(user_name, user_password, user_type, email, phone, tenant_id, state, create_time, update_time) VALUES ('admin', '7ad2410b2f4c074479a8937a28a22b8f', '0', 'xxx@qq.com', '', '0', 1, '2018-03-27 15:48:50', '2018-10-24 17:40:22'); -- Records of t_ds_alertgroup, default admin warning group INSERT INTO t_ds_alertgroup(alert_instance_ids, create_user_id, group_name, description, create_time, update_time) VALUES ('1,2', 1, 'default admin warning group', 'default admin warning group', '2018-11-29 10:20:39', '2018-11-29 10:20:39'); -- Records of t_ds_queue,default queue name : default INSERT INTO t_ds_queue(queue_name, queue, create_time, update_time) VALUES ('default', 'default', '2018-11-29 10:22:33', '2018-11-29 10:22:33'); -- Records of t_ds_queue,default queue name : default INSERT INTO t_ds_version(version) VALUES ('1.4.0'); -- -- Table structure for table t_ds_plugin_define -- DROP TABLE IF EXISTS t_ds_plugin_define; CREATE TABLE 
t_ds_plugin_define ( id serial NOT NULL, plugin_name varchar(100) NOT NULL, plugin_type varchar(100) NOT NULL, plugin_params text NULL, create_time timestamp NULL, update_time timestamp NULL, CONSTRAINT t_ds_plugin_define_pk PRIMARY KEY (id), CONSTRAINT t_ds_plugin_define_un UNIQUE (plugin_name, plugin_type) ); -- -- Table structure for table t_ds_alert_plugin_instance -- DROP TABLE IF EXISTS t_ds_alert_plugin_instance; CREATE TABLE t_ds_alert_plugin_instance ( id serial NOT NULL, plugin_define_id int4 NOT NULL, plugin_instance_params text NULL, create_time timestamp NULL, update_time timestamp NULL, instance_name varchar(200) NULL, CONSTRAINT t_ds_alert_plugin_instance_pk PRIMARY KEY (id) );
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,904
[Bug][upgrade]dev branch upgrade mysql sql script error
**Describe the bug** ``` IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_datasource' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='t_ds_datasource_name_UN') ``` Create index type statement error **Which version of Dolphin Scheduler:** -[dev]
https://github.com/apache/dolphinscheduler/issues/5904
https://github.com/apache/dolphinscheduler/pull/5821
8db1b042b2523bd3b6ed363303283308ea14b2cc
0075523fdd7f170a626634ebe41c0cef4c36feaf
"2021-07-28T04:34:10Z"
java
"2021-07-28T09:22:39Z"
sql/upgrade/1.4.0_schema/mysql/dolphinscheduler_ddl.sql
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ SET sql_mode=(SELECT REPLACE(@@sql_mode,'ONLY_FULL_GROUP_BY','')); -- uc_dolphin_T_t_ds_user_A_state drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_user_A_state; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_user_A_state() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_user' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='state') THEN ALTER TABLE t_ds_user ADD `state` int(1) DEFAULT 1 COMMENT 'state 0:disable 1:enable'; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_user_A_state; DROP PROCEDURE uc_dolphin_T_t_ds_user_A_state; -- uc_dolphin_T_t_ds_tenant_A_tenant_name drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_tenant_A_tenant_name; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_tenant_A_tenant_name() BEGIN IF EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_tenant' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='tenant_name') THEN ALTER TABLE t_ds_tenant DROP `tenant_name`; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_tenant_A_tenant_name; DROP PROCEDURE uc_dolphin_T_t_ds_tenant_A_tenant_name; -- uc_dolphin_T_t_ds_task_instance_A_first_submit_time drop PROCEDURE if EXISTS 
uc_dolphin_T_t_ds_task_instance_A_first_submit_time; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_task_instance_A_first_submit_time() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_task_instance' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='first_submit_time') THEN ALTER TABLE t_ds_task_instance ADD `first_submit_time` datetime DEFAULT NULL COMMENT 'task first submit time'; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_task_instance_A_first_submit_time(); DROP PROCEDURE uc_dolphin_T_t_ds_task_instance_A_first_submit_time; -- uc_dolphin_T_t_ds_task_instance_A_delay_time drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_task_instance_A_delay_time; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_task_instance_A_delay_time() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_task_instance' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='delay_time') THEN ALTER TABLE t_ds_task_instance ADD `delay_time` int(4) DEFAULT '0' COMMENT 'task delay execution time'; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_task_instance_A_delay_time(); DROP PROCEDURE uc_dolphin_T_t_ds_task_instance_A_delay_time; -- uc_dolphin_T_t_ds_task_instance_A_var_pool drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_task_instance_A_var_pool; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_task_instance_A_var_pool() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_task_instance' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='var_pool') THEN ALTER TABLE t_ds_task_instance ADD `var_pool` longtext NULL; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_task_instance_A_var_pool(); DROP PROCEDURE uc_dolphin_T_t_ds_task_instance_A_var_pool; -- uc_dolphin_T_t_ds_task_instance_A_add_task_code drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_task_instance_A_add_task_code; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_task_instance_A_add_task_code() BEGIN IF NOT EXISTS (SELECT 
1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_task_instance' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='task_code') THEN ALTER TABLE t_ds_task_instance ADD `task_code` bigint(20) NOT NULL COMMENT 'task definition code'; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_task_instance_A_add_task_code(); DROP PROCEDURE uc_dolphin_T_t_ds_task_instance_A_add_task_code; -- uc_dolphin_T_t_ds_task_instance_A_add_task_definition_version drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_task_instance_A_add_task_definition_version; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_task_instance_A_add_task_definition_version() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_task_instance' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='task_definition_version') THEN ALTER TABLE t_ds_task_instance ADD `task_definition_version` int(11) DEFAULT NULL COMMENT 'task definition version'; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_task_instance_A_add_task_definition_version(); DROP PROCEDURE uc_dolphin_T_t_ds_task_instance_A_add_task_definition_version; -- uc_dolphin_T_t_ds_task_instance_A_add_task_params drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_task_instance_A_add_task_params; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_task_instance_A_add_task_params() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_task_instance' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='task_params') THEN ALTER TABLE t_ds_task_instance ADD `task_params` text COMMENT 'job custom parameters'; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_task_instance_A_add_task_params(); DROP PROCEDURE uc_dolphin_T_t_ds_task_instance_A_add_task_params; -- uc_dolphin_T_t_ds_process_instance_A_process_definition_version drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_process_instance_A_process_definition_version; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_process_instance_A_process_definition_version() 
BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_process_instance' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='process_definition_version') THEN ALTER TABLE t_ds_process_instance ADD `process_definition_version` int(11) DEFAULT NULL COMMENT 'process definition version'; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_process_instance_A_process_definition_version(); DROP PROCEDURE uc_dolphin_T_t_ds_process_instance_A_process_definition_version; -- uc_dolphin_T_t_ds_process_instance_A_process_definition_code drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_process_instance_A_process_definition_code; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_process_instance_A_process_definition_code() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_process_instance' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='process_definition_code') THEN ALTER TABLE t_ds_process_instance ADD `process_definition_code` bigint(20) not NULL COMMENT 'process definition code'; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_process_instance_A_process_definition_code(); DROP PROCEDURE uc_dolphin_T_t_ds_process_instance_A_process_definition_code; -- uc_dolphin_T_t_ds_process_instance_A_var_pool drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_process_instance_A_var_pool; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_process_instance_A_var_pool() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_process_instance' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='var_pool') THEN ALTER TABLE t_ds_process_instance ADD `var_pool` longtext NULL; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_process_instance_A_var_pool(); DROP PROCEDURE uc_dolphin_T_t_ds_process_instance_A_var_pool; -- uc_dolphin_T_t_ds_process_definition_A_modify_by drop PROCEDURE if EXISTS ct_dolphin_T_t_ds_process_definition_version; delimiter d// CREATE PROCEDURE 
ct_dolphin_T_t_ds_process_definition_version() BEGIN CREATE TABLE IF NOT EXISTS `t_ds_process_definition_version` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `process_definition_id` int(11) NOT NULL COMMENT 'process definition id', `version` int(11) DEFAULT NULL COMMENT 'process definition version', `process_definition_json` longtext COMMENT 'process definition json content', `description` text, `global_params` text COMMENT 'global parameters', `locations` text COMMENT 'Node location information', `connects` text COMMENT 'Node connection information', `receivers` text COMMENT 'receivers', `receivers_cc` text COMMENT 'cc', `create_time` datetime DEFAULT NULL COMMENT 'create time', `timeout` int(11) DEFAULT '0' COMMENT 'time out', `resource_ids` varchar(255) DEFAULT NULL COMMENT 'resource ids', PRIMARY KEY (`id`), UNIQUE KEY `process_definition_id_and_version` (`process_definition_id`,`version`) USING BTREE, KEY `process_definition_index` (`id`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=84 DEFAULT CHARSET=utf8; END; d// delimiter ; CALL ct_dolphin_T_t_ds_process_definition_version; DROP PROCEDURE ct_dolphin_T_t_ds_process_definition_version; -- uc_dolphin_T_t_ds_project_instance_A_add_code drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_project_instance_A_add_code; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_project_instance_A_add_code() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_project' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='code') THEN ALTER TABLE t_ds_project ADD `code` bigint(20) NOT NULL COMMENT 'encoding'; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_project_instance_A_add_code(); DROP PROCEDURE uc_dolphin_T_t_ds_project_instance_A_add_code; -- uc_dolphin_T_t_ds_process_definition_A_add_code drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_process_definition_A_add_code; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_process_definition_A_add_code() BEGIN IF NOT EXISTS (SELECT 1 FROM 
information_schema.COLUMNS WHERE TABLE_NAME='t_ds_process_definition' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='code') THEN ALTER TABLE t_ds_process_definition ADD `code` bigint(20) NOT NULL COMMENT 'encoding'; ALTER TABLE t_ds_process_definition ADD UNIQUE KEY `code_unique` (`code`); END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_process_definition_A_add_code(); DROP PROCEDURE uc_dolphin_T_t_ds_process_definition_A_add_code; -- uc_dolphin_T_t_ds_process_definition_A_add_project_code drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_process_definition_A_add_project_code; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_process_definition_A_add_project_code() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_process_definition' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='project_code') THEN ALTER TABLE t_ds_process_definition ADD `project_code` bigint(20) NOT NULL COMMENT 'project code'; ALTER TABLE t_ds_process_definition DROP INDEX `process_definition_unique`, ADD UNIQUE KEY `process_unique` (`name`,`project_code`) USING BTREE; ALTER TABLE t_ds_process_definition DROP `project_id`, DROP `process_definition_json`, DROP `receivers`, DROP `receivers_cc`, DROP `modify_by`, DROP `resource_ids`; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_process_definition_A_add_project_code(); DROP PROCEDURE uc_dolphin_T_t_ds_process_definition_A_add_project_code; -- ---------------------------- -- Table structure for t_ds_task_definition -- ---------------------------- DROP TABLE IF EXISTS `t_ds_task_definition`; CREATE TABLE `t_ds_task_definition` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `code` bigint(20) NOT NULL COMMENT 'encoding', `name` varchar(200) DEFAULT NULL COMMENT 'task definition name', `version` int(11) DEFAULT NULL COMMENT 'task definition version', `description` text COMMENT 'description', `project_code` bigint(20) NOT NULL COMMENT 'project code', `user_id` int(11) DEFAULT NULL 
COMMENT 'task definition creator id', `task_type` varchar(50) NOT NULL COMMENT 'task type', `task_params` longtext COMMENT 'job custom parameters', `flag` tinyint(2) DEFAULT NULL COMMENT '0 not available, 1 available', `task_priority` tinyint(4) DEFAULT NULL COMMENT 'job priority', `worker_group` varchar(200) DEFAULT NULL COMMENT 'worker grouping', `fail_retry_times` int(11) DEFAULT NULL COMMENT 'number of failed retries', `fail_retry_interval` int(11) DEFAULT NULL COMMENT 'failed retry interval', `timeout_flag` tinyint(2) DEFAULT '0' COMMENT 'timeout flag:0 close, 1 open', `timeout_notify_strategy` tinyint(4) DEFAULT NULL COMMENT 'timeout notification policy: 0 warning, 1 fail', `timeout` int(11) DEFAULT '0' COMMENT 'timeout length,unit: minute', `delay_time` int(11) DEFAULT '0' COMMENT 'delay execution time,unit: minute', `resource_ids` varchar(255) DEFAULT NULL COMMENT 'resource id, separated by comma', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`,`code`), UNIQUE KEY `task_unique` (`name`,`project_code`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; create index task_definition_index on t_ds_task_definition (project_code,id); -- ---------------------------- -- Table structure for t_ds_task_definition_log -- ---------------------------- DROP TABLE IF EXISTS `t_ds_task_definition_log`; CREATE TABLE `t_ds_task_definition_log` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `code` bigint(20) NOT NULL COMMENT 'encoding', `name` varchar(200) DEFAULT NULL COMMENT 'task definition name', `version` int(11) DEFAULT NULL COMMENT 'task definition version', `description` text COMMENT 'description', `project_code` bigint(20) NOT NULL COMMENT 'project code', `user_id` int(11) DEFAULT NULL COMMENT 'task definition creator id', `task_type` varchar(50) NOT NULL COMMENT 'task type', `task_params` text COMMENT 'job custom parameters', `flag` tinyint(2) 
DEFAULT NULL COMMENT '0 not available, 1 available', `task_priority` tinyint(4) DEFAULT NULL COMMENT 'job priority', `worker_group` varchar(200) DEFAULT NULL COMMENT 'worker grouping', `fail_retry_times` int(11) DEFAULT NULL COMMENT 'number of failed retries', `fail_retry_interval` int(11) DEFAULT NULL COMMENT 'failed retry interval', `timeout_flag` tinyint(2) DEFAULT '0' COMMENT 'timeout flag:0 close, 1 open', `timeout_notify_strategy` tinyint(4) DEFAULT NULL COMMENT 'timeout notification policy: 0 warning, 1 fail', `timeout` int(11) DEFAULT '0' COMMENT 'timeout length,unit: minute', `delay_time` int(11) DEFAULT '0' COMMENT 'delay execution time,unit: minute', `resource_ids` varchar(255) DEFAULT NULL COMMENT 'resource id, separated by comma', `operator` int(11) DEFAULT NULL COMMENT 'operator user id', `operate_time` datetime DEFAULT NULL COMMENT 'operate time', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_process_task_relation -- ---------------------------- DROP TABLE IF EXISTS `t_ds_process_task_relation`; CREATE TABLE `t_ds_process_task_relation` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `name` varchar(200) DEFAULT NULL COMMENT 'relation name', `process_definition_version` int(11) DEFAULT NULL COMMENT 'process version', `project_code` bigint(20) NOT NULL COMMENT 'project code', `process_definition_code` bigint(20) NOT NULL COMMENT 'process code', `pre_task_code` bigint(20) NOT NULL COMMENT 'pre task code', `pre_task_version` int(11) NOT NULL COMMENT 'pre task version', `post_task_code` bigint(20) NOT NULL COMMENT 'post task code', `post_task_version` int(11) NOT NULL COMMENT 'post task version', `condition_type` tinyint(2) DEFAULT NULL COMMENT 'condition type : 0 none, 1 judge 2 delay', `condition_params` text COMMENT 'condition 
params(json)', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_process_definition_log -- ---------------------------- DROP TABLE IF EXISTS `t_ds_process_definition_log`; CREATE TABLE `t_ds_process_definition_log` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `code` bigint(20) NOT NULL COMMENT 'encoding', `name` varchar(200) DEFAULT NULL COMMENT 'process definition name', `version` int(11) DEFAULT NULL COMMENT 'process definition version', `description` text COMMENT 'description', `project_code` bigint(20) NOT NULL COMMENT 'project code', `release_state` tinyint(4) DEFAULT NULL COMMENT 'process definition release state:0:offline,1:online', `user_id` int(11) DEFAULT NULL COMMENT 'process definition creator id', `global_params` text COMMENT 'global parameters', `flag` tinyint(4) DEFAULT NULL COMMENT '0 not available, 1 available', `locations` text COMMENT 'Node location information', `connects` text COMMENT 'Node connection information', `warning_group_id` int(11) DEFAULT NULL COMMENT 'alert group id', `timeout` int(11) DEFAULT '0' COMMENT 'time out,unit: minute', `tenant_id` int(11) NOT NULL DEFAULT '-1' COMMENT 'tenant id', `operator` int(11) DEFAULT NULL COMMENT 'operator user id', `operate_time` datetime DEFAULT NULL COMMENT 'operate time', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_process_task_relation_log -- ---------------------------- DROP TABLE IF EXISTS `t_ds_process_task_relation_log`; CREATE TABLE `t_ds_process_task_relation_log` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `name` varchar(200) DEFAULT NULL 
COMMENT 'relation name', `process_definition_version` int(11) DEFAULT NULL COMMENT 'process version', `project_code` bigint(20) NOT NULL COMMENT 'project code', `process_definition_code` bigint(20) NOT NULL COMMENT 'process code', `pre_task_code` bigint(20) NOT NULL COMMENT 'pre task code', `pre_task_version` int(11) NOT NULL COMMENT 'pre task version', `post_task_code` bigint(20) NOT NULL COMMENT 'post task code', `post_task_version` int(11) NOT NULL COMMENT 'post task version', `condition_type` tinyint(2) DEFAULT NULL COMMENT 'condition type : 0 none, 1 judge 2 delay', `condition_params` text COMMENT 'condition params(json)', `operator` int(11) DEFAULT NULL COMMENT 'operator user id', `operate_time` datetime DEFAULT NULL COMMENT 'operate time', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_plugin_define -- ---------------------------- DROP TABLE IF EXISTS `t_ds_plugin_define`; CREATE TABLE `t_ds_plugin_define` ( `id` int NOT NULL AUTO_INCREMENT, `plugin_name` varchar(100) NOT NULL COMMENT 'the name of plugin eg: email', `plugin_type` varchar(100) NOT NULL COMMENT 'plugin type . 
alert=alert plugin, job=job plugin', `plugin_params` text COMMENT 'plugin params', `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE KEY `t_ds_plugin_define_UN` (`plugin_name`,`plugin_type`) ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_alert_plugin_instance -- ---------------------------- DROP TABLE IF EXISTS `t_ds_alert_plugin_instance`; CREATE TABLE `t_ds_alert_plugin_instance` ( `id` int NOT NULL AUTO_INCREMENT, `plugin_define_id` int NOT NULL, `plugin_instance_params` text COMMENT 'plugin instance params. Also contain the params value which user input in web ui.', `create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP, `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `instance_name` varchar(200) DEFAULT NULL COMMENT 'alert instance name', PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- uc_dolphin_T_t_ds_process_definition_A_warning_group_id drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_process_definition_A_warning_group_id; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_process_definition_A_warning_group_id() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_process_definition' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='warning_group_id') THEN ALTER TABLE t_ds_process_definition ADD COLUMN `warning_group_id` int(11) DEFAULT NULL COMMENT 'alert group id' AFTER `connects`; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_process_definition_A_warning_group_id(); DROP PROCEDURE uc_dolphin_T_t_ds_process_definition_A_warning_group_id; -- uc_dolphin_T_t_ds_process_definition_version_A_warning_group_id drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_process_definition_version_A_warning_group_id; delimiter d// CREATE PROCEDURE 
uc_dolphin_T_t_ds_process_definition_version_A_warning_group_id() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_process_definition_version' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='warning_group_id') THEN ALTER TABLE t_ds_process_definition_version ADD COLUMN `warning_group_id` int(11) DEFAULT NULL COMMENT 'alert group id' AFTER `connects`; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_process_definition_version_A_warning_group_id(); DROP PROCEDURE uc_dolphin_T_t_ds_process_definition_version_A_warning_group_id; -- uc_dolphin_T_t_ds_alertgroup_A_alert_instance_ids drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_alertgroup_A_alert_instance_ids; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_alertgroup_A_alert_instance_ids() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_alertgroup' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='alert_instance_ids') THEN ALTER TABLE t_ds_alertgroup ADD COLUMN `alert_instance_ids` varchar (255) DEFAULT NULL COMMENT 'alert instance ids' AFTER `id`; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_alertgroup_A_alert_instance_ids(); DROP PROCEDURE uc_dolphin_T_t_ds_alertgroup_A_alert_instance_ids; -- uc_dolphin_T_t_ds_alertgroup_A_create_user_id drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_alertgroup_A_create_user_id; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_alertgroup_A_create_user_id() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_alertgroup' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='create_user_id') THEN ALTER TABLE t_ds_alertgroup ADD COLUMN `create_user_id` int(11) DEFAULT NULL COMMENT 'create user id' AFTER `alert_instance_ids`; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_alertgroup_A_create_user_id(); DROP PROCEDURE uc_dolphin_T_t_ds_alertgroup_A_create_user_id; -- uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName drop PROCEDURE if EXISTS 
uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.STATISTICS WHERE TABLE_NAME='t_ds_alertgroup' AND TABLE_SCHEMA=(SELECT DATABASE()) AND INDEX_NAME ='t_ds_alertgroup_name_UN') THEN ALTER TABLE t_ds_alertgroup ADD UNIQUE KEY `t_ds_alertgroup_name_UN` (`group_name`); END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName(); DROP PROCEDURE uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName; -- uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_datasource' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='t_ds_datasource_name_UN') THEN ALTER TABLE t_ds_datasource ADD UNIQUE KEY `t_ds_datasource_name_UN` (`name`, `type`); END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName(); DROP PROCEDURE uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName; -- uc_dolphin_T_t_ds_schedules_A_add_timezone drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_schedules_A_add_timezone; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_schedules_A_add_timezone() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_schedules' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='timezone_id') THEN ALTER TABLE t_ds_schedules ADD COLUMN `timezone_id` varchar(40) default NULL COMMENT 'schedule timezone id' AFTER `end_time`; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_schedules_A_add_timezone(); DROP PROCEDURE uc_dolphin_T_t_ds_schedules_A_add_timezone; -- ---------------------------- -- These columns will not be used in the new version,if you determine that the historical data is useless, you can delete it using the sql 
below -- ---------------------------- -- ALTER TABLE t_ds_alert DROP `show_type`, DROP `alert_type`, DROP `receivers`, DROP `receivers_cc`; -- ALTER TABLE t_ds_alertgroup DROP `group_type`; -- ALTER TABLE t_ds_process_definition DROP `receivers`, DROP `receivers_cc`; -- ALTER TABLE t_ds_process_definition_version DROP `receivers`, DROP `receivers_cc`; -- DROP TABLE IF EXISTS t_ds_relation_user_alertgroup; -- ALTER TABLE t_ds_command DROP `dependence`; -- ALTER TABLE t_ds_error_command DROP `dependence`;
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,904
[Bug][upgrade]dev branch upgrade mysql sql script error
**Describe the bug** ``` IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_datasource' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='t_ds_datasource_name_UN') ``` Create index type statement error **Which version of Dolphin Scheduler:** -[dev]
https://github.com/apache/dolphinscheduler/issues/5904
https://github.com/apache/dolphinscheduler/pull/5821
8db1b042b2523bd3b6ed363303283308ea14b2cc
0075523fdd7f170a626634ebe41c0cef4c36feaf
"2021-07-28T04:34:10Z"
java
"2021-07-28T09:22:39Z"
sql/upgrade/1.4.0_schema/postgresql/dolphinscheduler_ddl.sql
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ -- uc_dolphin_T_t_ds_user_A_state delimiter ; DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_user_A_state(); delimiter d// CREATE FUNCTION uc_dolphin_T_t_ds_user_A_state() RETURNS void AS $$ BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_CATALOG=current_database() AND TABLE_SCHEMA=current_schema() AND TABLE_NAME='t_ds_user' AND COLUMN_NAME ='state') THEN ALTER TABLE t_ds_user ADD COLUMN state int DEFAULT 1; comment on column t_ds_user.state is 'state 0:disable 1:enable'; END IF; END; $$ LANGUAGE plpgsql; d// delimiter ; select uc_dolphin_T_t_ds_user_A_state(); DROP FUNCTION uc_dolphin_T_t_ds_user_A_state(); -- uc_dolphin_T_t_ds_tenant_A_tenant_name delimiter ; DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_tenant_A_tenant_name(); delimiter d// CREATE FUNCTION uc_dolphin_T_t_ds_tenant_A_tenant_name() RETURNS void AS $$ BEGIN IF EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_CATALOG=current_database() AND TABLE_SCHEMA=current_schema() AND TABLE_NAME='t_ds_tenant' AND COLUMN_NAME ='tenant_name') THEN ALTER TABLE t_ds_tenant DROP COLUMN "tenant_name"; END IF; END; $$ LANGUAGE plpgsql; d// delimiter ; select uc_dolphin_T_t_ds_tenant_A_tenant_name(); DROP FUNCTION 
uc_dolphin_T_t_ds_tenant_A_tenant_name(); -- uc_dolphin_T_t_ds_task_instance_A_first_submit_time delimiter d// CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_task_instance_A_first_submit_time() RETURNS void AS $$ BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_task_instance' AND COLUMN_NAME ='first_submit_time') THEN ALTER TABLE t_ds_task_instance ADD COLUMN first_submit_time timestamp DEFAULT NULL; END IF; END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT uc_dolphin_T_t_ds_task_instance_A_first_submit_time(); DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_task_instance_A_first_submit_time(); -- uc_dolphin_T_t_ds_task_instance_A_delay_time delimiter d// CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_task_instance_A_delay_time() RETURNS void AS $$ BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_task_instance' AND COLUMN_NAME ='delay_time') THEN ALTER TABLE t_ds_task_instance ADD COLUMN delay_time int DEFAULT '0'; END IF; END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT uc_dolphin_T_t_ds_task_instance_A_delay_time(); DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_task_instance_A_delay_time(); -- uc_dolphin_T_t_ds_task_instance_A_var_pool delimiter d// CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_task_instance_A_var_pool() RETURNS void AS $$ BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_task_instance' AND COLUMN_NAME ='var_pool') THEN ALTER TABLE t_ds_task_instance ADD COLUMN var_pool text; END IF; END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT uc_dolphin_T_t_ds_task_instance_A_var_pool(); DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_task_instance_A_var_pool(); -- uc_dolphin_T_t_ds_task_instance_A_task_code delimiter d// CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_task_instance_A_task_code() RETURNS void AS $$ BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_task_instance' AND COLUMN_NAME ='task_code') THEN ALTER TABLE 
t_ds_task_instance ADD COLUMN task_code bigint NOT NULL; END IF; END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT uc_dolphin_T_t_ds_task_instance_A_task_code(); DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_task_instance_A_task_code(); -- uc_dolphin_T_t_ds_task_instance_A_task_definition_version delimiter d// CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_task_instance_A_task_definition_version() RETURNS void AS $$ BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_task_instance' AND COLUMN_NAME ='task_definition_version') THEN ALTER TABLE t_ds_task_instance ADD COLUMN task_definition_version int DEFAULT NULL; END IF; END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT uc_dolphin_T_t_ds_task_instance_A_task_definition_version(); DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_task_instance_A_task_definition_version(); -- uc_dolphin_T_t_ds_task_instance_A_task_params delimiter d// CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_task_instance_A_task_params() RETURNS void AS $$ BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_task_instance' AND COLUMN_NAME ='task_params') THEN ALTER TABLE t_ds_task_instance ADD COLUMN task_params text; END IF; END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT uc_dolphin_T_t_ds_task_instance_A_task_params(); DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_task_instance_A_task_params(); -- uc_dolphin_T_t_ds_process_instance_A_process_definition_code delimiter d// CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_process_instance_A_process_definition_code() RETURNS void AS $$ BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_process_instance' AND COLUMN_NAME ='process_definition_code') THEN ALTER TABLE t_ds_process_instance ADD COLUMN process_definition_code bigint DEFAULT NULL; END IF; END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT uc_dolphin_T_t_ds_process_instance_A_process_definition_code(); DROP FUNCTION IF EXISTS 
uc_dolphin_T_t_ds_process_instance_A_process_definition_code(); -- uc_dolphin_T_t_ds_process_instance_A_process_definition_version delimiter d// CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_process_instance_A_process_definition_version() RETURNS void AS $$ BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_process_instance' AND COLUMN_NAME ='process_definition_version') THEN ALTER TABLE t_ds_process_instance ADD COLUMN process_definition_version int DEFAULT NULL; END IF; END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT uc_dolphin_T_t_ds_process_instance_A_process_definition_version(); DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_process_instance_A_process_definition_version(); -- uc_dolphin_T_t_ds_process_instance_A_var_pool delimiter d// CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_process_instance_A_var_pool() RETURNS void AS $$ BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_process_instance' AND COLUMN_NAME ='var_pool') THEN ALTER TABLE t_ds_process_instance ADD COLUMN var_pool text; END IF; END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT uc_dolphin_T_t_ds_process_instance_A_var_pool(); DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_process_instance_A_var_pool(); -- uc_dolphin_T_t_ds_project_A_code delimiter d// CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_project_A_code() RETURNS void AS $$ BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_project' AND COLUMN_NAME ='code') THEN ALTER TABLE t_ds_project ADD COLUMN code bigint NOT NULL; END IF; END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT uc_dolphin_T_t_ds_project_A_code(); DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_project_A_code(); -- uc_dolphin_T_t_ds_process_definition_A_code delimiter d// CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_process_definition_A_code() RETURNS void AS $$ BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_process_definition' AND COLUMN_NAME ='code') 
THEN ALTER TABLE t_ds_process_definition ADD COLUMN code bigint NOT NULL; ALTER TABLE t_ds_process_definition ADD CONSTRAINT code_unique UNIQUE (code); END IF; END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT uc_dolphin_T_t_ds_process_definition_A_code(); DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_process_definition_A_code(); -- uc_dolphin_T_t_ds_process_definition_A_project_code delimiter d// CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_process_definition_A_project_code() RETURNS void AS $$ BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_process_definition' AND COLUMN_NAME ='project_code') THEN ALTER TABLE t_ds_process_definition ADD COLUMN project_code bigint NOT NULL; ALTER TABLE t_ds_process_definition DROP CONSTRAINT process_definition_unique, ADD CONSTRAINT process_definition_unique UNIQUE (name, project_code); ALTER TABLE t_ds_process_definition DROP project_id, DROP process_definition_json, DROP receivers, DROP receivers_cc, DROP modify_by, DROP resource_ids; END IF; END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT uc_dolphin_T_t_ds_process_definition_A_project_code(); DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_process_definition_A_project_code(); -- uc_dolphin_T_t_ds_process_definition_A_modify_by delimiter d// CREATE OR REPLACE FUNCTION ct_dolphin_T_t_ds_process_definition_version() RETURNS void AS $$ BEGIN CREATE TABLE IF NOT EXISTS t_ds_process_definition_version ( id int NOT NULL , process_definition_id int NOT NULL , version int DEFAULT NULL , process_definition_json text , description text , global_params text , locations text , connects text , receivers text , receivers_cc text , create_time timestamp DEFAULT NULL , timeout int DEFAULT '0' , resource_ids varchar(64), PRIMARY KEY (id) ) ; create index process_definition_id_and_version on t_ds_process_definition_version (process_definition_id,version); DROP SEQUENCE IF EXISTS t_ds_process_definition_version_id_sequence; CREATE SEQUENCE 
t_ds_process_definition_version_id_sequence; ALTER TABLE t_ds_process_definition_version ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_definition_version_id_sequence'); END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT ct_dolphin_T_t_ds_process_definition_version(); DROP FUNCTION IF EXISTS ct_dolphin_T_t_ds_process_definition_version(); -- ct_dolphin_T_t_ds_task_definition delimiter d// CREATE OR REPLACE FUNCTION ct_dolphin_T_t_ds_task_definition() RETURNS void AS $$ BEGIN CREATE TABLE IF NOT EXISTS t_ds_task_definition ( id int NOT NULL , code bigint NOT NULL, name varchar(255) DEFAULT NULL , version int DEFAULT NULL , description text , project_code bigint DEFAULT NULL , user_id int DEFAULT NULL , task_type varchar(50) DEFAULT NULL , task_params text , flag int DEFAULT NULL , task_priority int DEFAULT NULL , worker_group varchar(255) DEFAULT NULL , fail_retry_times int DEFAULT NULL , fail_retry_interval int DEFAULT NULL , timeout_flag int DEFAULT NULL , timeout_notify_strategy int DEFAULT NULL , timeout int DEFAULT '0' , delay_time int DEFAULT '0' , resource_ids varchar(255) DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) , CONSTRAINT task_definition_unique UNIQUE (name, project_code) ) ; create index task_definition_index on t_ds_task_definition (project_code,id); DROP SEQUENCE IF EXISTS t_ds_task_definition_id_sequence; CREATE SEQUENCE t_ds_task_definition_id_sequence; ALTER TABLE t_ds_task_definition ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_task_definition_id_sequence'); END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT ct_dolphin_T_t_ds_task_definition(); DROP FUNCTION IF EXISTS ct_dolphin_T_t_ds_task_definition(); -- ct_dolphin_T_t_ds_task_definition_log delimiter d// CREATE OR REPLACE FUNCTION ct_dolphin_T_t_ds_task_definition_log() RETURNS void AS $$ BEGIN CREATE TABLE IF NOT EXISTS t_ds_task_definition_log ( id int NOT NULL , code bigint NOT NULL, name varchar(255) DEFAULT NULL , version int 
DEFAULT NULL , description text , project_code bigint DEFAULT NULL , user_id int DEFAULT NULL , task_type varchar(50) DEFAULT NULL , task_params text , flag int DEFAULT NULL , task_priority int DEFAULT NULL , worker_group varchar(255) DEFAULT NULL , fail_retry_times int DEFAULT NULL , fail_retry_interval int DEFAULT NULL , timeout_flag int DEFAULT NULL , timeout_notify_strategy int DEFAULT NULL , timeout int DEFAULT '0' , delay_time int DEFAULT '0' , resource_ids varchar(255) DEFAULT NULL , operator int DEFAULT NULL , operate_time timestamp DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; DROP SEQUENCE IF EXISTS t_ds_task_definition_log_id_sequence; CREATE SEQUENCE t_ds_task_definition_log_id_sequence; ALTER TABLE t_ds_task_definition_log ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_task_definition_log_id_sequence'); END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT ct_dolphin_T_t_ds_task_definition_log(); DROP FUNCTION IF EXISTS ct_dolphin_T_t_ds_task_definition_log(); -- ct_dolphin_T_t_ds_process_task_relation delimiter d// CREATE OR REPLACE FUNCTION ct_dolphin_T_t_ds_process_task_relation() RETURNS void AS $$ BEGIN CREATE TABLE IF NOT EXISTS t_ds_process_task_relation ( id int NOT NULL , name varchar(255) DEFAULT NULL , process_definition_version int DEFAULT NULL , project_code bigint DEFAULT NULL , process_definition_code bigint DEFAULT NULL , pre_task_code bigint DEFAULT NULL , pre_task_version int DEFAULT '0' , post_task_code bigint DEFAULT NULL , post_task_version int DEFAULT '0' , condition_type int DEFAULT NULL , condition_params text , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; DROP SEQUENCE IF EXISTS t_ds_process_task_relation_id_sequence; CREATE SEQUENCE t_ds_process_task_relation_id_sequence; ALTER TABLE t_ds_process_task_relation ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_task_relation_id_sequence'); END; $$ LANGUAGE plpgsql; d// 
delimiter ; SELECT ct_dolphin_T_t_ds_process_task_relation(); DROP FUNCTION IF EXISTS ct_dolphin_T_t_ds_process_task_relation(); -- ct_dolphin_T_t_ds_process_definition_log delimiter d// CREATE OR REPLACE FUNCTION ct_dolphin_T_t_ds_process_definition_log() RETURNS void AS $$ BEGIN CREATE TABLE IF NOT EXISTS t_ds_process_definition_log ( id int NOT NULL , code bigint NOT NULL, name varchar(255) DEFAULT NULL , version int DEFAULT NULL , description text , project_code bigint DEFAULT NULL , release_state int DEFAULT NULL , user_id int DEFAULT NULL , global_params text , locations text , connects text , warning_group_id int DEFAULT NULL , flag int DEFAULT NULL , timeout int DEFAULT '0' , tenant_id int DEFAULT '-1' , operator int DEFAULT NULL , operate_time timestamp DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; DROP SEQUENCE IF EXISTS t_ds_process_definition_log_id_sequence; CREATE SEQUENCE t_ds_process_definition_log_id_sequence; ALTER TABLE t_ds_process_definition_log ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_definition_log_id_sequence'); END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT ct_dolphin_T_t_ds_process_definition_log(); DROP FUNCTION IF EXISTS ct_dolphin_T_t_ds_process_definition_log(); -- ct_dolphin_T_t_ds_process_task_relation_log delimiter d// CREATE OR REPLACE FUNCTION ct_dolphin_T_t_ds_process_task_relation_log() RETURNS void AS $$ BEGIN CREATE TABLE IF NOT EXISTS t_ds_process_task_relation_log ( id int NOT NULL , name varchar(255) DEFAULT NULL , process_definition_version int DEFAULT NULL , project_code bigint DEFAULT NULL , process_definition_code bigint DEFAULT NULL , pre_task_code bigint DEFAULT NULL , pre_task_version int DEFAULT '0' , post_task_code bigint DEFAULT NULL , post_task_version int DEFAULT '0' , condition_type int DEFAULT NULL , condition_params text , operator int DEFAULT NULL , operate_time timestamp DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time 
timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; DROP SEQUENCE IF EXISTS t_ds_process_task_relation_log_id_sequence; CREATE SEQUENCE t_ds_process_task_relation_log_id_sequence; ALTER TABLE t_ds_process_task_relation_log ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_task_relation_log_id_sequence'); END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT ct_dolphin_T_t_ds_process_task_relation_log(); DROP FUNCTION IF EXISTS ct_dolphin_T_t_ds_process_task_relation_log(); -- ---------------------------- -- Table structure for t_ds_plugin_define -- ---------------------------- DROP TABLE IF EXISTS t_ds_plugin_define; CREATE TABLE t_ds_plugin_define ( id serial NOT NULL, plugin_name varchar(100) NOT NULL, plugin_type varchar(100) NOT NULL, plugin_params text NULL, create_time timestamp NULL, update_time timestamp NULL, CONSTRAINT t_ds_plugin_define_pk PRIMARY KEY (id), CONSTRAINT t_ds_plugin_define_un UNIQUE (plugin_name, plugin_type) ); -- ---------------------------- -- Table structure for t_ds_alert_plugin_instance -- ---------------------------- DROP TABLE IF EXISTS t_ds_alert_plugin_instance; CREATE TABLE t_ds_alert_plugin_instance ( id serial NOT NULL, plugin_define_id int4 NOT NULL, plugin_instance_params text NULL, create_time timestamp NULL, update_time timestamp NULL, instance_name varchar(200) NULL, CONSTRAINT t_ds_alert_plugin_instance_pk PRIMARY KEY (id) ); -- uc_dolphin_T_t_ds_process_definition_A_warning_group_id delimiter d// CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_process_definition_A_warning_group_id() RETURNS void AS $$ BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_process_definition' AND COLUMN_NAME ='warning_group_id') THEN ALTER TABLE t_ds_process_definition ADD COLUMN warning_group_id int4 DEFAULT NULL; COMMENT ON COLUMN t_ds_process_definition.warning_group_id IS 'alert group id'; END IF; END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT uc_dolphin_T_t_ds_process_definition_A_warning_group_id(); DROP FUNCTION 
IF EXISTS uc_dolphin_T_t_ds_process_definition_A_warning_group_id(); -- uc_dolphin_T_t_ds_process_definition_version_A_warning_group_id delimiter d// CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_process_definition_version_A_warning_group_id() RETURNS void AS $$ BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_process_definition_version' AND COLUMN_NAME ='warning_group_id') THEN ALTER TABLE t_ds_process_definition_version ADD COLUMN warning_group_id int4 DEFAULT NULL; COMMENT ON COLUMN t_ds_process_definition_version.warning_group_id IS 'alert group id'; END IF; END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT uc_dolphin_T_t_ds_process_definition_version_A_warning_group_id(); DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_process_definition_version_A_warning_group_id(); -- uc_dolphin_T_t_ds_alertgroup_A_alert_instance_ids delimiter d// CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_alertgroup_A_alert_instance_ids() RETURNS void AS $$ BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_alertgroup' AND COLUMN_NAME ='alert_instance_ids') THEN ALTER TABLE t_ds_alertgroup ADD COLUMN alert_instance_ids varchar (255) DEFAULT NULL; COMMENT ON COLUMN t_ds_alertgroup.alert_instance_ids IS 'alert instance ids'; END IF; END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT uc_dolphin_T_t_ds_alertgroup_A_alert_instance_ids(); DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_alertgroup_A_alert_instance_ids(); -- uc_dolphin_T_t_ds_alertgroup_A_create_user_id delimiter d// CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_alertgroup_A_create_user_id() RETURNS void AS $$ BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_alertgroup' AND COLUMN_NAME ='create_user_id') THEN ALTER TABLE t_ds_alertgroup ADD COLUMN create_user_id int4 DEFAULT NULL; COMMENT ON COLUMN t_ds_alertgroup.create_user_id IS 'create user id'; END IF; END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT 
uc_dolphin_T_t_ds_alertgroup_A_create_user_id(); DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_alertgroup_A_create_user_id(); -- uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName delimiter d// CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName() RETURNS void AS $$ BEGIN IF NOT EXISTS (SELECT 1 FROM pg_stat_all_indexes WHERE relname='t_ds_alertgroup' AND indexrelname ='t_ds_alertgroup_name_UN') THEN ALTER TABLE t_ds_alertgroup ADD CONSTRAINT t_ds_alertgroup_name_UN UNIQUE (group_name); END IF; END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName(); DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName(); -- uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName delimiter d// CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName() RETURNS void AS $$ BEGIN IF NOT EXISTS (SELECT 1 FROM pg_stat_all_indexes WHERE relname='t_ds_datasource' AND indexrelname ='t_ds_datasource_name_UN') THEN ALTER TABLE t_ds_datasource ADD CONSTRAINT t_ds_datasource_name_UN UNIQUE (name, type); END IF; END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName(); DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName(); -- uc_dolphin_T_t_ds_schedules_A_add_timezone delimiter d// CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_schedules_A_add_timezone() RETURNS void AS $$ BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_schedules' AND COLUMN_NAME ='timezone_id') THEN ALTER TABLE t_ds_schedules ADD COLUMN timezone_id varchar(40) DEFAULT NULL; END IF; END; $$ LANGUAGE plpgsql; d// delimiter ; SELECT uc_dolphin_T_t_ds_schedules_A_add_timezone(); DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_schedules_A_add_timezone(); -- ---------------------------- -- These columns will not be used in the new version,if you determine that the historical data is useless, you can delete it using the sql below -- 
---------------------------- -- ALTER TABLE t_ds_alert DROP COLUMN "show_type", DROP COLUMN "alert_type", DROP COLUMN "receivers", DROP COLUMN "receivers_cc"; -- ALTER TABLE t_ds_alertgroup DROP COLUMN "group_type"; -- ALTER TABLE t_ds_process_definition DROP COLUMN "receivers", DROP COLUMN "receivers_cc"; -- ALTER TABLE t_ds_process_definition_version DROP COLUMN "receivers", DROP COLUMN "receivers_cc"; -- DROP TABLE IF EXISTS t_ds_relation_user_alertgroup; -- ALTER TABLE t_ds_command DROP COLUMN "dependence"; -- ALTER TABLE t_ds_error_command DROP COLUMN "dependence";
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,921
[Improvement][CI] Maven connection sometimes times out
## Describe the question I notice a lot of connection timeout errors in the CI build. For example: https://github.com/apache/dolphinscheduler/runs/3192300302?check_suite_focus=true ![image](https://user-images.githubusercontent.com/49934421/127519775-c5a97ba0-1790-4aec-8538-cdaa47af44f7.png) ## Describe alternatives you've considered Please see: https://github.com/actions/virtual-environments/issues/1499#issuecomment-689467080 We can add `-Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=120` to the build command.
https://github.com/apache/dolphinscheduler/issues/5921
https://github.com/apache/dolphinscheduler/pull/5924
2535db4c77ab09822aec57ac4dd9f586bd8ad476
0b4a20251ff2e609f59295d6b98bc83006ff2a25
"2021-07-29T15:27:24Z"
java
"2021-08-01T14:18:32Z"
.github/workflows/ci_backend.yml
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # name: Backend on: push: paths: - '.github/workflows/ci_backend.yml' - 'package.xml' - 'pom.xml' - 'dolphinscheduler-alert/**' - 'dolphinscheduler-api/**' - 'dolphinscheduler-common/**' - 'dolphinscheduler-dao/**' - 'dolphinscheduler-rpc/**' - 'dolphinscheduler-server/**' pull_request: paths: - '.github/workflows/ci_backend.yml' - 'package.xml' - 'pom.xml' - 'dolphinscheduler-alert/**' - 'dolphinscheduler-api/**' - 'dolphinscheduler-common/**' - 'dolphinscheduler-dao/**' - 'dolphinscheduler-rpc/**' - 'dolphinscheduler-server/**' jobs: Compile-check: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 with: submodule: true - name: Check License Header uses: apache/skywalking-eyes@ec88b7d850018c8983f87729ea88549e100c5c82 - name: Set up JDK 1.8 uses: actions/setup-java@v1 with: java-version: 1.8 - name: Compile run: mvn -B clean compile install -Prelease -Dmaven.test.skip=true - name: Check dependency license run: tools/dependencies/check-LICENSE.sh
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,921
[Improvement][CI] Maven connection sometimes times out
## Describe the question I notice a lot of connection timeout errors in the CI build. For example: https://github.com/apache/dolphinscheduler/runs/3192300302?check_suite_focus=true ![image](https://user-images.githubusercontent.com/49934421/127519775-c5a97ba0-1790-4aec-8538-cdaa47af44f7.png) ## Describe alternatives you've considered Please see: https://github.com/actions/virtual-environments/issues/1499#issuecomment-689467080 We can add `-Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=120` to the build command.
https://github.com/apache/dolphinscheduler/issues/5921
https://github.com/apache/dolphinscheduler/pull/5924
2535db4c77ab09822aec57ac4dd9f586bd8ad476
0b4a20251ff2e609f59295d6b98bc83006ff2a25
"2021-07-29T15:27:24Z"
java
"2021-08-01T14:18:32Z"
.github/workflows/ci_ut.yml
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # on: pull_request: push: branches: - dev env: LOG_DIR: /tmp/dolphinscheduler name: Unit Test jobs: build: name: Build runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 with: submodule: true - name: Check License Header uses: apache/skywalking-eyes@ec88b7d850018c8983f87729ea88549e100c5c82 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Only enable review / suggestion here - uses: actions/cache@v1 with: path: ~/.m2/repository key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} restore-keys: | ${{ runner.os }}-maven- - name: Bootstrap database run: | sed -i "/image: bitnami\/postgresql/a\ ports:\n - 5432:5432" $(pwd)/docker/docker-swarm/docker-compose.yml sed -i "/image: bitnami\/zookeeper/a\ ports:\n - 2181:2181" $(pwd)/docker/docker-swarm/docker-compose.yml docker-compose -f $(pwd)/docker/docker-swarm/docker-compose.yml up -d dolphinscheduler-zookeeper dolphinscheduler-postgresql until docker logs docker-swarm_dolphinscheduler-postgresql_1 2>&1 | grep 'listening on IPv4 address'; do echo "waiting for postgresql ready ..."; sleep 1; done docker run --rm --network docker-swarm_dolphinscheduler -v 
$(pwd)/sql/dolphinscheduler_postgre.sql:/docker-entrypoint-initdb.d/dolphinscheduler_postgre.sql bitnami/postgresql:latest bash -c "PGPASSWORD=root psql -h docker-swarm_dolphinscheduler-postgresql_1 -U root -d dolphinscheduler -v ON_ERROR_STOP=1 -f /docker-entrypoint-initdb.d/dolphinscheduler_postgre.sql" - name: Set up JDK 1.8 uses: actions/setup-java@v1 with: java-version: 1.8 - name: Git fetch unshallow run: | git fetch --unshallow git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*" git fetch origin - name: Compile run: | export MAVEN_OPTS='-Dmaven.repo.local=.m2/repository -XX:+TieredCompilation -XX:TieredStopAtLevel=1 -XX:+CMSClassUnloadingEnabled -XX:+UseConcMarkSweepGC -XX:-UseGCOverheadLimit -Xmx5g' mvn clean verify -B -Dmaven.test.skip=false - name: Upload coverage report to codecov run: | CODECOV_TOKEN="09c2663f-b091-4258-8a47-c981827eb29a" bash <(curl -s https://codecov.io/bash) # Set up JDK 11 for SonarCloud. - name: Set up JDK 1.11 uses: actions/setup-java@v1 with: java-version: 1.11 - name: Run SonarCloud Analysis run: > mvn --batch-mode verify sonar:sonar -Dsonar.coverage.jacoco.xmlReportPaths=target/site/jacoco/jacoco.xml -Dmaven.test.skip=true -Dsonar.host.url=https://sonarcloud.io -Dsonar.organization=apache -Dsonar.core.codeCoveragePlugin=jacoco -Dsonar.projectKey=apache-dolphinscheduler -Dsonar.login=e4058004bc6be89decf558ac819aa1ecbee57682 -Dsonar.exclusions=dolphinscheduler-ui/src/**/i18n/locale/*.js,dolphinscheduler-microbench/src/**/* env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} - name: Collect logs run: | mkdir -p ${LOG_DIR} docker-compose -f $(pwd)/docker/docker-swarm/docker-compose.yml logs dolphinscheduler-postgresql > ${LOG_DIR}/db.txt continue-on-error: true Checkstyle: name: Check code style runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v2 with: submodule: true - name: check code style env: WORKDIR: ./ REVIEWDOG_GITHUB_API_TOKEN: ${{ 
secrets.GITHUB_TOKEN }} CHECKSTYLE_CONFIG: style/checkstyle.xml REVIEWDOG_VERSION: v0.10.2 run: | wget -O - -q https://github.com/checkstyle/checkstyle/releases/download/checkstyle-8.43/checkstyle-8.43-all.jar > /opt/checkstyle.jar wget -O - -q https://raw.githubusercontent.com/reviewdog/reviewdog/master/install.sh | sh -s -- -b /opt ${REVIEWDOG_VERSION} java -jar /opt/checkstyle.jar "${WORKDIR}" -c "${CHECKSTYLE_CONFIG}" -f xml \ | /opt/reviewdog -f=checkstyle \ -reporter="${INPUT_REPORTER:-github-pr-check}" \ -filter-mode="${INPUT_FILTER_MODE:-added}" \ -fail-on-error="${INPUT_FAIL_ON_ERROR:-false}"
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,921
[Improvement][CI] Maven connection sometimes times out
## Describe the question I notice a lot of connection timeout errors in the CI build. For example: https://github.com/apache/dolphinscheduler/runs/3192300302?check_suite_focus=true ![image](https://user-images.githubusercontent.com/49934421/127519775-c5a97ba0-1790-4aec-8538-cdaa47af44f7.png) ## Describe alternatives you've considered Please see: https://github.com/actions/virtual-environments/issues/1499#issuecomment-689467080 We can add `-Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=120` to the build command.
https://github.com/apache/dolphinscheduler/issues/5921
https://github.com/apache/dolphinscheduler/pull/5924
2535db4c77ab09822aec57ac4dd9f586bd8ad476
0b4a20251ff2e609f59295d6b98bc83006ff2a25
"2021-07-29T15:27:24Z"
java
"2021-08-01T14:18:32Z"
docker/build/hooks/build
#!/bin/bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # set -e echo "------ dolphinscheduler start - build -------" printenv if [ -z "${VERSION}" ] then echo "set default environment variable [VERSION]" export VERSION=$(cat $(pwd)/pom.xml | grep '<version>' -m 1 | awk '{print $1}' | sed 's/<version>//' | sed 's/<\/version>//') fi if [ "${DOCKER_REPO}x" = "x" ] then echo "set default environment variable [DOCKER_REPO]" export DOCKER_REPO='apache/dolphinscheduler' fi echo "Version: $VERSION" echo "Repo: $DOCKER_REPO" echo -e "Current Directory is $(pwd)\n" # maven package(Project Directory) echo -e "mvn -B clean compile package -Prelease -Dmaven.test.skip=true" mvn -B clean compile package -Prelease -Dmaven.test.skip=true # mv dolphinscheduler-bin.tar.gz file to docker/build directory echo -e "mv $(pwd)/dolphinscheduler-dist/target/apache-dolphinscheduler-${VERSION}-bin.tar.gz $(pwd)/docker/build/\n" mv $(pwd)/dolphinscheduler-dist/target/apache-dolphinscheduler-${VERSION}-bin.tar.gz $(pwd)/docker/build/ # docker build BUILD_COMMAND="docker build --build-arg VERSION=${VERSION} -t $DOCKER_REPO:${VERSION} $(pwd)/docker/build/" echo -e "$BUILD_COMMAND\n" if (docker info 2> /dev/null | grep -i "ERROR"); then sudo $BUILD_COMMAND else $BUILD_COMMAND fi 
echo "------ dolphinscheduler end - build -------"
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,921
[Improvement][CI] Maven connection sometimes times out
## Describe the question I notice a lot of connection timeout errors in the CI build. For example: https://github.com/apache/dolphinscheduler/runs/3192300302?check_suite_focus=true ![image](https://user-images.githubusercontent.com/49934421/127519775-c5a97ba0-1790-4aec-8538-cdaa47af44f7.png) ## Describe alternatives you've considered Please see: https://github.com/actions/virtual-environments/issues/1499#issuecomment-689467080 We can add `-Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=120` to the build command.
https://github.com/apache/dolphinscheduler/issues/5921
https://github.com/apache/dolphinscheduler/pull/5924
2535db4c77ab09822aec57ac4dd9f586bd8ad476
0b4a20251ff2e609f59295d6b98bc83006ff2a25
"2021-07-29T15:27:24Z"
java
"2021-08-01T14:18:32Z"
docker/build/hooks/build.bat
:: Licensed to the Apache Software Foundation (ASF) under one or more :: contributor license agreements. See the NOTICE file distributed with :: this work for additional information regarding copyright ownership. :: The ASF licenses this file to You under the Apache License, Version 2.0 :: (the "License"); you may not use this file except in compliance with :: the License. You may obtain a copy of the License at :: :: http://www.apache.org/licenses/LICENSE-2.0 :: :: Unless required by applicable law or agreed to in writing, software :: distributed under the License is distributed on an "AS IS" BASIS, :: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. :: See the License for the specific language governing permissions and :: limitations under the License. :: @echo off echo "------ dolphinscheduler start - build -------" set setlocal enableextensions enabledelayedexpansion if not defined VERSION ( echo "set environment variable [VERSION]" set first=1 for /f "tokens=3 delims=<>" %%a in ('findstr "<version>[0-9].*</version>" %cd%\pom.xml') do ( if !first! 
EQU 1 (set VERSION=%%a) set first=0 ) ) if not defined DOCKER_REPO ( echo "set environment variable [DOCKER_REPO]" set DOCKER_REPO=dolphinscheduler ) echo "Version: %VERSION%" echo "Repo: %DOCKER_REPO%" echo "Current Directory is %cd%" :: maven package(Project Directory) echo "call mvn clean compile package -Prelease" call mvn clean compile package -Prelease -DskipTests=true if "%errorlevel%"=="1" goto :mvnFailed :: move dolphinscheduler-bin.tar.gz file to docker/build directory echo "move %cd%\dolphinscheduler-dist\target\apache-dolphinscheduler-%VERSION%-bin.tar.gz %cd%\docker\build\" move %cd%\dolphinscheduler-dist\target\apache-dolphinscheduler-%VERSION%-bin.tar.gz %cd%\docker\build\ :: docker build echo "docker build --build-arg VERSION=%VERSION% -t %DOCKER_REPO%:%VERSION% %cd%\docker\build\" docker build --build-arg VERSION=%VERSION% -t %DOCKER_REPO%:%VERSION% %cd%\docker\build\ if "%errorlevel%"=="1" goto :dockerBuildFailed echo "------ dolphinscheduler end - build -------" :mvnFailed echo "MAVEN PACKAGE FAILED!" :dockerBuildFailed echo "DOCKER BUILD FAILED!"
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,975
[Bug][dolphinscheduler-dao] queryLastRunningProcess sql in ProcessInstanceMapper.xml
**Describe the bug** In method queryLastRunningProcess of ProcessInstanceMapper.xml, when judging the start time and the end time, the parentheses should be less.The version is latest. **Code** <select id="queryLastRunningProcess" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select <include refid="baseSql"/> from t_ds_process_instance where process_definition_code=#{processDefinitionCode} <if test="states !=null and states.length != 0"> and state in <foreach collection="states" item="i" index="index" open="(" separator="," close=")"> #{i} </foreach> </if> _<if test="startTime!=null and endTime != null "> and (schedule_time <![CDATA[ >= ]]> #{startTime} and schedule_time <![CDATA[ <= ]]> #{endTime} or start_time <![CDATA[ >= ]]> #{startTime} and start_time <![CDATA[ <= ]]> #{endTime}) </if>_ order by start_time desc limit 1 </select> **It should look like this in the slash** _<if test="startTime!=null and endTime != null "> and ((schedule_time <![CDATA[ >= ]]> #{startTime} and schedule_time <![CDATA[ <= ]]> #{endTime}) or (start_time <![CDATA[ >= ]]> #{startTime} and start_time <![CDATA[ <= ]]> #{endTime})) </if>_ Pls check it, thank you.
https://github.com/apache/dolphinscheduler/issues/5975
https://github.com/apache/dolphinscheduler/pull/5980
f7e0e9fecf92bf074bbff89eef9f98a8fc5ba45b
2afa625a753680313e8a6c5fb3a68e01e56f5caa
"2021-08-11T07:48:29Z"
java
"2021-08-13T01:28:24Z"
dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.xml
<?xml version="1.0" encoding="UTF-8" ?> <!-- ~ Licensed to the Apache Software Foundation (ASF) under one or more ~ contributor license agreements. See the NOTICE file distributed with ~ this work for additional information regarding copyright ownership. ~ The ASF licenses this file to You under the Apache License, Version 2.0 ~ (the "License"); you may not use this file except in compliance with ~ the License. You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0 ~ ~ Unless required by applicable law or agreed to in writing, software ~ distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the License for the specific language governing permissions and ~ limitations under the License. --> <!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" > <mapper namespace="org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper"> <sql id="baseSql"> id, name, process_definition_version, process_definition_code, state, recovery, start_time, end_time, run_times,host, command_type, command_param, task_depend_type, max_try_times, failure_strategy, warning_type, warning_group_id, schedule_time, command_start_time, global_params, flag, update_time, is_sub_process, executor_id, history_cmd, process_instance_priority, worker_group, timeout, tenant_id, var_pool </sql> <select id="queryDetailById" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select <include refid="baseSql"/> from t_ds_process_instance where id = #{processId} </select> <select id="queryByHostAndStatus" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select <include refid="baseSql"/> from t_ds_process_instance where 1=1 <if test="host != null and host != ''"> and host=#{host} </if> and state in <foreach collection="states" item="i" open="(" close=")" separator=","> #{i} </foreach> order by id asc 
</select> <select id="queryTopNProcessInstance" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select <include refid="baseSql"/> from t_ds_process_instance where state = #{status} and start_time between #{startTime} and #{endTime} order by end_time-start_time desc limit #{size} </select> <select id="queryByTenantIdAndStatus" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select <include refid="baseSql"/> from t_ds_process_instance where 1=1 <if test="tenantId != -1"> and tenant_id =#{tenantId} </if> and state in <foreach collection="states" item="i" open="(" close=")" separator=","> #{i} </foreach> order by id asc </select> <select id="queryByWorkerGroupNameAndStatus" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select <include refid="baseSql"/> from t_ds_process_instance where 1=1 <if test="workerGroupName != ''"> and worker_group =#{workerGroupName} </if> and state in <foreach collection="states" item="i" open="(" close=")" separator=","> #{i} </foreach> order by id asc </select> <select id="queryProcessInstanceListPaging" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select instance.id, instance.command_type, instance.executor_id, instance.process_definition_version, instance.process_definition_code, instance.name, instance.state, instance.schedule_time, instance.start_time, instance.end_time, instance.run_times, instance.recovery, instance.host from t_ds_process_instance instance join t_ds_process_definition define ON instance.process_definition_code = define.code where instance.is_sub_process=0 and define.project_code = #{projectCode} <if test="processDefinitionCode != 0"> and instance.process_definition_code = #{processDefinitionCode} </if> <if test="searchVal != null and searchVal != ''"> and instance.name like concat('%', #{searchVal}, '%') </if> <if test="startTime != null "> and instance.start_time > #{startTime} and instance.start_time <![CDATA[ <=]]> #{endTime} 
</if> <if test="states != null and states.length > 0"> and instance.state in <foreach collection="states" index="index" item="i" open="(" separator="," close=")"> #{i} </foreach> </if> <if test="host != null and host != ''"> and instance.host like concat('%', #{host}, '%') </if> <if test="executorId != 0"> and instance.executor_id = #{executorId} </if> order by instance.start_time desc,instance.end_time desc </select> <update id="setFailoverByHostAndStateArray"> update t_ds_process_instance set host=null where host =#{host} and state in <foreach collection="states" index="index" item="i" open="(" close=")" separator=","> #{i} </foreach> </update> <update id="updateProcessInstanceByState"> update t_ds_process_instance set state = #{destState} where state = #{originState} </update> <update id="updateProcessInstanceByTenantId"> update t_ds_process_instance set tenant_id = #{destTenantId} where tenant_id = #{originTenantId} </update> <update id="updateProcessInstanceByWorkerGroupName"> update t_ds_process_instance set worker_group = #{destWorkerGroupName} where worker_group = #{originWorkerGroupName} </update> <select id="countInstanceStateByUser" resultType="org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount"> select t.state, count(0) as count from t_ds_process_instance t join t_ds_process_definition d on d.code=t.process_definition_code join t_ds_project p on p.code=d.project_code where 1 = 1 and t.is_sub_process = 0 <if test="startTime != null and endTime != null"> and t.start_time <![CDATA[ >= ]]> #{startTime} and t.start_time <![CDATA[ <= ]]> #{endTime} </if> <if test="projectCodes != null and projectCodes.length != 0"> and p.code in <foreach collection="projectCodes" index="index" item="i" open="(" close=")" separator=","> #{i} </foreach> </if> group by t.state </select> <select id="queryByProcessDefineCode" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select <include refid="baseSql"/> from t_ds_process_instance where 
process_definition_code=#{processDefinitionCode} order by start_time desc limit #{size} </select> <select id="queryLastSchedulerProcess" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select <include refid="baseSql"/> from t_ds_process_instance where process_definition_code=#{processDefinitionCode} <if test="startTime!=null and endTime != null "> and schedule_time <![CDATA[ >= ]]> #{startTime} and schedule_time <![CDATA[ <= ]]> #{endTime} </if> order by end_time desc limit 1 </select> <select id="queryLastRunningProcess" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select <include refid="baseSql"/> from t_ds_process_instance where process_definition_code=#{processDefinitionCode} <if test="states !=null and states.length != 0"> and state in <foreach collection="states" item="i" index="index" open="(" separator="," close=")"> #{i} </foreach> </if> <if test="startTime!=null and endTime != null "> and (schedule_time <![CDATA[ >= ]]> #{startTime} and schedule_time <![CDATA[ <= ]]> #{endTime} or start_time <![CDATA[ >= ]]> #{startTime} and start_time <![CDATA[ <= ]]> #{endTime}) </if> order by start_time desc limit 1 </select> <select id="queryLastManualProcess" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select <include refid="baseSql"/> from t_ds_process_instance where process_definition_code=#{processDefinitionCode} and schedule_time is null <if test="startTime!=null and endTime != null "> and start_time <![CDATA[ >= ]]> #{startTime} and start_time <![CDATA[ <= ]]> #{endTime} </if> order by end_time desc limit 1 </select> <select id="queryByProcessDefineCodeAndStatus" resultType="org.apache.dolphinscheduler.dao.entity.ProcessInstance"> select <include refid="baseSql"/> from t_ds_process_instance where process_definition_code=#{processDefinitionCode} and state in <foreach collection="states" item="i" open="(" close=")" separator=","> #{i} </foreach> order by id asc </select> <update 
id="updateGlobalParamsById"> update t_ds_process_instance set global_params = #{globalParams} where id = #{id} </update> </mapper>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,385
[Improvement][recording data] Manually configure the parallelism of supplementary when recording data
*i met a situation that the memory fulls and the dolphinscheduler server down when i start the recording data(补数) tasks, if there is a feature that we can configure the parallelism of supplementary recording data manually * **Describe the question** The memory fulls and the dolphinscheduler server down when i start the recording data(补数) tasks. **What are the current deficiencies and the benefits of improvement** We can configure the parallelism of supplementary recording data manually so that the machine resource will not be occupied too much. **Which version of DolphinScheduler:** -[1.3.3-preview] **Describe alternatives you've considered** Changing the model to "Serial"(串行) but this model will take too much time.
https://github.com/apache/dolphinscheduler/issues/4385
https://github.com/apache/dolphinscheduler/pull/5912
2afa625a753680313e8a6c5fb3a68e01e56f5caa
1887bde1ebf8249de153890e78d449582e3eaf4a
"2021-01-06T02:58:47Z"
java
"2021-08-16T06:59:28Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.controller; import static org.apache.dolphinscheduler.api.enums.Status.CHECK_PROCESS_DEFINITION_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.EXECUTE_PROCESS_INSTANCE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.START_PROCESS_INSTANCE_ERROR; import org.apache.dolphinscheduler.api.aspect.AccessLogAnnotation; import org.apache.dolphinscheduler.api.enums.ExecuteType; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.ExecutorService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.RunMode; import org.apache.dolphinscheduler.common.enums.TaskDependType; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.User; import java.util.Map; import 
org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestAttribute; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; import io.swagger.annotations.Api; import io.swagger.annotations.ApiImplicitParam; import io.swagger.annotations.ApiImplicitParams; import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiParam; import springfox.documentation.annotations.ApiIgnore; /** * executor controller */ @Api(tags = "EXECUTOR_TAG") @RestController @RequestMapping("projects/{projectName}/executors") public class ExecutorController extends BaseController { @Autowired private ExecutorService execService; /** * execute process instance * * @param loginUser login user * @param projectName project name * @param processDefinitionId process definition id * @param scheduleTime schedule time * @param failureStrategy failure strategy * @param startNodeList start nodes list * @param taskDependType task depend type * @param execType execute type * @param warningType warning type * @param warningGroupId warning group id * @param runMode run mode * @param processInstancePriority process instance priority * @param workerGroup worker group * @param timeout timeout * @return start process result code */ @ApiOperation(value = "startProcessInstance", notes = "RUN_PROCESS_INSTANCE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "scheduleTime", value = "SCHEDULE_TIME", required = true, dataType = "String"), @ApiImplicitParam(name 
= "failureStrategy", value = "FAILURE_STRATEGY", required = true, dataType = "FailureStrategy"), @ApiImplicitParam(name = "startNodeList", value = "START_NODE_LIST", dataType = "String"), @ApiImplicitParam(name = "taskDependType", value = "TASK_DEPEND_TYPE", dataType = "TaskDependType"), @ApiImplicitParam(name = "execType", value = "COMMAND_TYPE", dataType = "CommandType"), @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", required = true, dataType = "WarningType"), @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "runMode", value = "RUN_MODE", dataType = "RunMode"), @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", required = true, dataType = "Priority"), @ApiImplicitParam(name = "workerGroup", value = "WORKER_GROUP", dataType = "String", example = "default"), @ApiImplicitParam(name = "timeout", value = "TIMEOUT", dataType = "Int", example = "100"), }) @PostMapping(value = "start-process-instance") @ResponseStatus(HttpStatus.OK) @ApiException(START_PROCESS_INSTANCE_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result startProcessInstance(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam(value = "processDefinitionId") int processDefinitionId, @RequestParam(value = "scheduleTime", required = false) String scheduleTime, @RequestParam(value = "failureStrategy", required = true) FailureStrategy failureStrategy, @RequestParam(value = "startNodeList", required = false) String startNodeList, @RequestParam(value = "taskDependType", required = false) TaskDependType taskDependType, @RequestParam(value = "execType", required = false) CommandType execType, @RequestParam(value = "warningType", required = true) WarningType warningType, @RequestParam(value = 
"warningGroupId", required = false) int warningGroupId, @RequestParam(value = "runMode", required = false) RunMode runMode, @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority, @RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup, @RequestParam(value = "timeout", required = false) Integer timeout, @RequestParam(value = "startParams", required = false) String startParams) { if (timeout == null) { timeout = Constants.MAX_TASK_TIMEOUT; } Map<String, String> startParamMap = null; if (startParams != null) { startParamMap = JSONUtils.toMap(startParams); } Map<String, Object> result = execService.execProcessInstance(loginUser, projectName, processDefinitionId, scheduleTime, execType, failureStrategy, startNodeList, taskDependType, warningType, warningGroupId, runMode, processInstancePriority, workerGroup, timeout, startParamMap); return returnDataList(result); } /** * do action to process instance:pause, stop, repeat, recover from pause, recover from stop * * @param loginUser login user * @param projectName project name * @param processInstanceId process instance id * @param executeType execute type * @return execute result code */ @ApiOperation(value = "execute", notes = "EXECUTE_ACTION_TO_PROCESS_INSTANCE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "executeType", value = "EXECUTE_TYPE", required = true, dataType = "ExecuteType") }) @PostMapping(value = "/execute") @ResponseStatus(HttpStatus.OK) @ApiException(EXECUTE_PROCESS_INSTANCE_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result execute(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam("processInstanceId") Integer 
processInstanceId, @RequestParam("executeType") ExecuteType executeType ) { Map<String, Object> result = execService.execute(loginUser, projectName, processInstanceId, executeType); return returnDataList(result); } /** * check process definition and all of the son process definitions is on line. * * @param loginUser login user * @param processDefinitionId process definition id * @return check result code */ @ApiOperation(value = "startCheckProcessDefinition", notes = "START_CHECK_PROCESS_DEFINITION_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100") }) @PostMapping(value = "/start-check") @ResponseStatus(HttpStatus.OK) @ApiException(CHECK_PROCESS_DEFINITION_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result startCheckProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "processDefinitionId") int processDefinitionId) { Map<String, Object> result = execService.startCheckByProcessDefinedId(processDefinitionId); return returnDataList(result); } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,385
[Improvement][recording data] Manually configure the parallelism of supplementary when recording data
*i met a situation that the memory fulls and the dolphinscheduler server down when i start the recording data(补数) tasks, if there is a feature that we can configure the parallelism of supplementary recording data manually * **Describe the question** The memory fulls and the dolphinscheduler server down when i start the recording data(补数) tasks. **What are the current deficiencies and the benefits of improvement** We can configure the parallelism of supplementary recording data manually so that the machine resource will not be occupied too much. **Which version of DolphinScheduler:** -[1.3.3-preview] **Describe alternatives you've considered** Changing the model to "Serial"(串行) but this model will take too much time.
https://github.com/apache/dolphinscheduler/issues/4385
https://github.com/apache/dolphinscheduler/pull/5912
2afa625a753680313e8a6c5fb3a68e01e56f5caa
1887bde1ebf8249de153890e78d449582e3eaf4a
"2021-01-06T02:58:47Z"
java
"2021-08-16T06:59:28Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.ExecuteType; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.RunMode; import org.apache.dolphinscheduler.common.enums.TaskDependType; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.User; import java.util.Map; /** * executor service */ public interface ExecutorService { /** * execute process instance * * @param loginUser login user * @param projectName project name * @param processDefinitionId process Definition Id * @param cronTime cron time * @param commandType command type * @param failureStrategy failuer strategy * @param startNodeList start nodelist * @param taskDependType node dependency type * @param warningType warning type * @param warningGroupId notify group id * @param processInstancePriority process instance priority * @param workerGroup worker group name * @param runMode run mode * @param 
timeout timeout * @param startParams the global param values which pass to new process instance * @return execute process instance code */ Map<String, Object> execProcessInstance(User loginUser, String projectName, int processDefinitionId, String cronTime, CommandType commandType, FailureStrategy failureStrategy, String startNodeList, TaskDependType taskDependType, WarningType warningType, int warningGroupId, RunMode runMode, Priority processInstancePriority, String workerGroup, Integer timeout, Map<String, String> startParams); /** * check whether the process definition can be executed * * @param processDefinition process definition * @param processDefineCode process definition code * @return check result code */ Map<String, Object> checkProcessDefinitionValid(ProcessDefinition processDefinition, long processDefineCode); /** * do action to process instance:pause, stop, repeat, recover from pause, recover from stop * * @param loginUser login user * @param projectName project name * @param processInstanceId process instance id * @param executeType execute type * @return execute result code */ Map<String, Object> execute(User loginUser, String projectName, Integer processInstanceId, ExecuteType executeType); /** * check if sub processes are offline before starting process definition * * @param processDefineId process definition id * @return check result code */ Map<String, Object> startCheckByProcessDefinedId(int processDefineId); }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,385
[Improvement][recording data] Manually configure the parallelism of supplementary when recording data
*i met a situation that the memory fulls and the dolphinscheduler server down when i start the recording data(补数) tasks, if there is a feature that we can configure the parallelism of supplementary recording data manually * **Describe the question** The memory fulls and the dolphinscheduler server down when i start the recording data(补数) tasks. **What are the current deficiencies and the benefits of improvement** We can configure the parallelism of supplementary recording data manually so that the machine resource will not be occupied too much. **Which version of DolphinScheduler:** -[1.3.3-preview] **Describe alternatives you've considered** Changing the model to "Serial"(串行) but this model will take too much time.
https://github.com/apache/dolphinscheduler/issues/4385
https://github.com/apache/dolphinscheduler/pull/5912
2afa625a753680313e8a6c5fb3a68e01e56f5caa
1887bde1ebf8249de153890e78d449582e3eaf4a
"2021-01-06T02:58:47Z"
java
"2021-08-16T06:59:28Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service.impl; import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE; import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_NODE_NAMES; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_PARAMS; import static org.apache.dolphinscheduler.common.Constants.MAX_TASK_TIMEOUT; import org.apache.dolphinscheduler.api.enums.ExecuteType; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.ExecutorService; import org.apache.dolphinscheduler.api.service.MonitorService; import org.apache.dolphinscheduler.api.service.ProjectService; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; import 
org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.RunMode; import org.apache.dolphinscheduler.common.enums.TaskDependType; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.model.Server; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; import org.apache.commons.collections.MapUtils; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; /** * executor service impl */ @Service public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorService { private static final Logger logger = LoggerFactory.getLogger(ExecutorServiceImpl.class); @Autowired private ProjectMapper projectMapper; @Autowired private ProjectService projectService; @Autowired private ProcessDefinitionMapper 
processDefinitionMapper; @Autowired private MonitorService monitorService; @Autowired private ProcessInstanceMapper processInstanceMapper; @Autowired private ProcessService processService; /** * execute process instance * * @param loginUser login user * @param projectName project name * @param processDefinitionId process Definition Id * @param cronTime cron time * @param commandType command type * @param failureStrategy failure strategy * @param startNodeList start nodelist * @param taskDependType node dependency type * @param warningType warning type * @param warningGroupId notify group id * @param processInstancePriority process instance priority * @param workerGroup worker group name * @param runMode run mode * @param timeout timeout * @param startParams the global param values which pass to new process instance * @return execute process instance code */ @Override public Map<String, Object> execProcessInstance(User loginUser, String projectName, int processDefinitionId, String cronTime, CommandType commandType, FailureStrategy failureStrategy, String startNodeList, TaskDependType taskDependType, WarningType warningType, int warningGroupId, RunMode runMode, Priority processInstancePriority, String workerGroup, Integer timeout, Map<String, String> startParams) { Map<String, Object> result = new HashMap<>(); // timeout is invalid if (timeout <= 0 || timeout > MAX_TASK_TIMEOUT) { putMsg(result, Status.TASK_TIMEOUT_PARAMS_ERROR); return result; } Project project = projectMapper.queryByName(projectName); Map<String, Object> checkResultAndAuth = checkResultAndAuth(loginUser, projectName, project); if (checkResultAndAuth != null) { return checkResultAndAuth; } // check process define release state ProcessDefinition processDefinition = processDefinitionMapper.selectById(processDefinitionId); result = checkProcessDefinitionValid(processDefinition, processDefinitionId); if (result.get(Constants.STATUS) != Status.SUCCESS) { return result; } if 
(!checkTenantSuitable(processDefinition)) { logger.error("there is not any valid tenant for the process definition: id:{},name:{}, ", processDefinition.getId(), processDefinition.getName()); putMsg(result, Status.TENANT_NOT_SUITABLE); return result; } // check master exists if (!checkMasterExists(result)) { return result; } /** * create command */ int create = this.createCommand(commandType, processDefinitionId, taskDependType, failureStrategy, startNodeList, cronTime, warningType, loginUser.getId(), warningGroupId, runMode, processInstancePriority, workerGroup, startParams); if (create > 0) { processDefinition.setWarningGroupId(warningGroupId); processDefinitionMapper.updateById(processDefinition); putMsg(result, Status.SUCCESS); } else { putMsg(result, Status.START_PROCESS_INSTANCE_ERROR); } return result; } /** * check whether master exists * * @param result result * @return master exists return true , otherwise return false */ private boolean checkMasterExists(Map<String, Object> result) { // check master server exists List<Server> masterServers = monitorService.getServerListFromRegistry(true); // no master if (masterServers.isEmpty()) { putMsg(result, Status.MASTER_NOT_EXISTS); return false; } return true; } /** * check whether the process definition can be executed * * @param processDefinition process definition * @param processDefineCode process definition code * @return check result code */ @Override public Map<String, Object> checkProcessDefinitionValid(ProcessDefinition processDefinition, long processDefineCode) { Map<String, Object> result = new HashMap<>(); if (processDefinition == null) { // check process definition exists putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineCode); } else if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { // check process definition online putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefineCode); } else { result.put(Constants.STATUS, Status.SUCCESS); } return result; } /** * do action 
to process instance:pause, stop, repeat, recover from pause, recover from stop * * @param loginUser login user * @param projectName project name * @param processInstanceId process instance id * @param executeType execute type * @return execute result code */ @Override public Map<String, Object> execute(User loginUser, String projectName, Integer processInstanceId, ExecuteType executeType) { Map<String, Object> result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); Map<String, Object> checkResult = checkResultAndAuth(loginUser, projectName, project); if (checkResult != null) { return checkResult; } // check master exists if (!checkMasterExists(result)) { return result; } ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId); if (processInstance == null) { putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); return result; } ProcessDefinition processDefinition = processService.findProcessDefinition(processInstance.getProcessDefinitionCode(), processInstance.getProcessDefinitionVersion()); if (executeType != ExecuteType.STOP && executeType != ExecuteType.PAUSE) { result = checkProcessDefinitionValid(processDefinition, processInstance.getProcessDefinitionCode()); if (result.get(Constants.STATUS) != Status.SUCCESS) { return result; } } checkResult = checkExecuteType(processInstance, executeType); Status status = (Status) checkResult.get(Constants.STATUS); if (status != Status.SUCCESS) { return checkResult; } if (!checkTenantSuitable(processDefinition)) { logger.error("there is not any valid tenant for the process definition: id:{},name:{}, ", processDefinition.getId(), processDefinition.getName()); putMsg(result, Status.TENANT_NOT_SUITABLE); } //get the startParams user specified at the first starting while repeat running is needed Map<String, Object> commandMap = JSONUtils.toMap(processInstance.getCommandParam(), String.class, Object.class); String startParams = null; if 
(MapUtils.isNotEmpty(commandMap) && executeType == ExecuteType.REPEAT_RUNNING) { Object startParamsJson = commandMap.get(Constants.CMD_PARAM_START_PARAMS); if (startParamsJson != null) { startParams = startParamsJson.toString(); } } switch (executeType) { case REPEAT_RUNNING: result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.REPEAT_RUNNING, startParams); break; case RECOVER_SUSPENDED_PROCESS: result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.RECOVER_SUSPENDED_PROCESS, startParams); break; case START_FAILURE_TASK_PROCESS: result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.START_FAILURE_TASK_PROCESS, startParams); break; case STOP: if (processInstance.getState() == ExecutionStatus.READY_STOP) { putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState()); } else { result = updateProcessInstancePrepare(processInstance, CommandType.STOP, ExecutionStatus.READY_STOP); } break; case PAUSE: if (processInstance.getState() == ExecutionStatus.READY_PAUSE) { putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState()); } else { result = updateProcessInstancePrepare(processInstance, CommandType.PAUSE, ExecutionStatus.READY_PAUSE); } break; default: logger.error("unknown execute type : {}", executeType); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "unknown execute type"); break; } return result; } /** * check tenant suitable * * @param processDefinition process definition * @return true if tenant suitable, otherwise return false */ private boolean checkTenantSuitable(ProcessDefinition processDefinition) { Tenant tenant = processService.getTenantForProcess(processDefinition.getTenantId(), processDefinition.getUserId()); return tenant != null; } /** * Check the state of process instance and the type of operation match * * @param processInstance 
process instance * @param executeType execute type * @return check result code */ private Map<String, Object> checkExecuteType(ProcessInstance processInstance, ExecuteType executeType) { Map<String, Object> result = new HashMap<>(); ExecutionStatus executionStatus = processInstance.getState(); boolean checkResult = false; switch (executeType) { case PAUSE: case STOP: if (executionStatus.typeIsRunning()) { checkResult = true; } break; case REPEAT_RUNNING: if (executionStatus.typeIsFinished()) { checkResult = true; } break; case START_FAILURE_TASK_PROCESS: if (executionStatus.typeIsFailure()) { checkResult = true; } break; case RECOVER_SUSPENDED_PROCESS: if (executionStatus.typeIsPause() || executionStatus.typeIsCancel()) { checkResult = true; } break; default: break; } if (!checkResult) { putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, processInstance.getName(), executionStatus.toString(), executeType.toString()); } else { putMsg(result, Status.SUCCESS); } return result; } /** * prepare to update process instance command type and status * * @param processInstance process instance * @param commandType command type * @param executionStatus execute status * @return update result */ private Map<String, Object> updateProcessInstancePrepare(ProcessInstance processInstance, CommandType commandType, ExecutionStatus executionStatus) { Map<String, Object> result = new HashMap<>(); processInstance.setCommandType(commandType); processInstance.addHistoryCmd(commandType); processInstance.setState(executionStatus); int update = processService.updateProcessInstance(processInstance); // determine whether the process is normal if (update > 0) { putMsg(result, Status.SUCCESS); } else { putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR); } return result; } /** * insert command, used in the implementation of the page, re run, recovery (pause / failure) execution * * @param loginUser login user * @param instanceId instance id * @param processDefinitionId process 
definition id * @param commandType command type * @return insert result code */ private Map<String, Object> insertCommand(User loginUser, Integer instanceId, Integer processDefinitionId, CommandType commandType, String startParams) { Map<String, Object> result = new HashMap<>(); //To add startParams only when repeat running is needed Map<String, Object> cmdParam = new HashMap<>(); cmdParam.put(CMD_PARAM_RECOVER_PROCESS_ID_STRING, instanceId); if (StringUtils.isNotEmpty(startParams)) { cmdParam.put(CMD_PARAM_START_PARAMS, startParams); } Command command = new Command(); command.setCommandType(commandType); command.setProcessDefinitionId(processDefinitionId); command.setCommandParam(JSONUtils.toJsonString(cmdParam)); command.setExecutorId(loginUser.getId()); if (!processService.verifyIsNeedCreateCommand(command)) { putMsg(result, Status.PROCESS_INSTANCE_EXECUTING_COMMAND, processDefinitionId); return result; } int create = processService.createCommand(command); if (create > 0) { putMsg(result, Status.SUCCESS); } else { putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR); } return result; } /** * check if sub processes are offline before starting process definition * * @param processDefineId process definition id * @return check result code */ @Override public Map<String, Object> startCheckByProcessDefinedId(int processDefineId) { Map<String, Object> result = new HashMap<>(); if (processDefineId == 0) { logger.error("process definition id is null"); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "process definition id"); } List<Integer> ids = new ArrayList<>(); processService.recurseFindSubProcessId(processDefineId, ids); Integer[] idArray = ids.toArray(new Integer[ids.size()]); if (!ids.isEmpty()) { List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(idArray); if (processDefinitionList != null) { for (ProcessDefinition processDefinition : processDefinitionList) { /** * if there is no online process, exit 
directly */ if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName()); logger.info("not release process definition id: {} , name : {}", processDefinition.getId(), processDefinition.getName()); return result; } } } } putMsg(result, Status.SUCCESS); return result; } /** * create command * * @param commandType commandType * @param processDefineId processDefineId * @param nodeDep nodeDep * @param failureStrategy failureStrategy * @param startNodeList startNodeList * @param schedule schedule * @param warningType warningType * @param executorId executorId * @param warningGroupId warningGroupId * @param runMode runMode * @param processInstancePriority processInstancePriority * @param workerGroup workerGroup * @return command id */ private int createCommand(CommandType commandType, int processDefineId, TaskDependType nodeDep, FailureStrategy failureStrategy, String startNodeList, String schedule, WarningType warningType, int executorId, int warningGroupId, RunMode runMode, Priority processInstancePriority, String workerGroup, Map<String, String> startParams) { /** * instantiate command schedule instance */ Command command = new Command(); Map<String, String> cmdParam = new HashMap<>(); if (commandType == null) { command.setCommandType(CommandType.START_PROCESS); } else { command.setCommandType(commandType); } command.setProcessDefinitionId(processDefineId); if (nodeDep != null) { command.setTaskDependType(nodeDep); } if (failureStrategy != null) { command.setFailureStrategy(failureStrategy); } if (StringUtils.isNotEmpty(startNodeList)) { cmdParam.put(CMD_PARAM_START_NODE_NAMES, startNodeList); } if (warningType != null) { command.setWarningType(warningType); } if (startParams != null && startParams.size() > 0) { cmdParam.put(CMD_PARAM_START_PARAMS, JSONUtils.toJsonString(startParams)); } command.setCommandParam(JSONUtils.toJsonString(cmdParam)); command.setExecutorId(executorId); 
command.setWarningGroupId(warningGroupId); command.setProcessInstancePriority(processInstancePriority); command.setWorkerGroup(workerGroup); Date start = null; Date end = null; if (StringUtils.isNotEmpty(schedule)) { String[] interval = schedule.split(","); if (interval.length == 2) { start = DateUtils.getScheduleDate(interval[0]); end = DateUtils.getScheduleDate(interval[1]); } } // determine whether to complement if (commandType == CommandType.COMPLEMENT_DATA) { runMode = (runMode == null) ? RunMode.RUN_MODE_SERIAL : runMode; if (null != start && null != end && !start.after(end)) { if (runMode == RunMode.RUN_MODE_SERIAL) { cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start)); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(end)); command.setCommandParam(JSONUtils.toJsonString(cmdParam)); return processService.createCommand(command); } else if (runMode == RunMode.RUN_MODE_PARALLEL) { List<Schedule> schedules = processService.queryReleaseSchedulerListByProcessDefinitionId(processDefineId); List<Date> listDate = new LinkedList<>(); if (!CollectionUtils.isEmpty(schedules)) { for (Schedule item : schedules) { listDate.addAll(CronUtils.getSelfFireDateList(start, end, item.getCrontab())); } } if (!CollectionUtils.isEmpty(listDate)) { // loop by schedule date for (Date date : listDate) { cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(date)); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(date)); command.setCommandParam(JSONUtils.toJsonString(cmdParam)); processService.createCommand(command); } return listDate.size(); } else { // loop by day int runCunt = 0; while (!start.after(end)) { runCunt += 1; cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start)); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(start)); command.setCommandParam(JSONUtils.toJsonString(cmdParam)); processService.createCommand(command); start = 
DateUtils.getSomeDay(start, 1); } return runCunt; } } } else { logger.error("there is not valid schedule date for the process definition: id:{}", processDefineId); } } else { command.setCommandParam(JSONUtils.toJsonString(cmdParam)); return processService.createCommand(command); } return 0; } /** * check result and auth */ private Map<String, Object> checkResultAndAuth(User loginUser, String projectName, Project project) { // check project auth Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); Status status = (Status) checkResult.get(Constants.STATUS); if (status != Status.SUCCESS) { return checkResult; } return null; } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,385
[Improvement][recording data] Manually configure the parallelism of supplementary when recording data
*i met a situation that the memory fulls and the dolphinscheduler server down when i start the recording data(补数) tasks, if there is a feature that we can configure the parallelism of supplementary recording data manually * **Describe the question** The memory fulls and the dolphinscheduler server down when i start the recording data(补数) tasks. **What are the current deficiencies and the benefits of improvement** We can configure the parallelism of supplementary recording data manually so that the machine resource will not be occupied too much. **Which version of DolphinScheduler:** -[1.3.3-preview] **Describe alternatives you've considered** Changing the model to "Serial"(串行) but this model will take too much time.
https://github.com/apache/dolphinscheduler/issues/4385
https://github.com/apache/dolphinscheduler/pull/5912
2afa625a753680313e8a6c5fb3a68e01e56f5caa
1887bde1ebf8249de153890e78d449582e3eaf4a
"2021-01-06T02:58:47Z"
java
"2021-08-16T06:59:28Z"
dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # QUERY_SCHEDULE_LIST_NOTES=query schedule list EXECUTE_PROCESS_TAG=execute process related operation PROCESS_INSTANCE_EXECUTOR_TAG=process instance executor related operation RUN_PROCESS_INSTANCE_NOTES=run process instance START_NODE_LIST=start node list(node name) TASK_DEPEND_TYPE=task depend type COMMAND_TYPE=command type RUN_MODE=run mode TIMEOUT=timeout EXECUTE_ACTION_TO_PROCESS_INSTANCE_NOTES=execute action to process instance EXECUTE_TYPE=execute type START_CHECK_PROCESS_DEFINITION_NOTES=start check process definition GET_RECEIVER_CC_NOTES=query receiver cc DESC=description GROUP_NAME=group name GROUP_TYPE=group type QUERY_ALERT_GROUP_LIST_NOTES=query alert group list UPDATE_ALERT_GROUP_NOTES=update alert group DELETE_ALERT_GROUP_BY_ID_NOTES=delete alert group by id VERIFY_ALERT_GROUP_NAME_NOTES=verify alert group name, check alert group exist or not GRANT_ALERT_GROUP_NOTES=grant alert group USER_IDS=user id list EXECUTOR_TAG=executor operation EXECUTOR_NAME=executor name WORKER_GROUP=work group startParams=start parameters ALERT_GROUP_TAG=alert group related operation ALERT_PLUGIN_INSTANCE_TAG=alert plugin instance related operation WORK_FLOW_LINEAGE_TAG=work flow lineage related operation 
UI_PLUGINS_TAG=UI plugin related operation UPDATE_ALERT_PLUGIN_INSTANCE_NOTES=update alert plugin instance operation CREATE_ALERT_PLUGIN_INSTANCE_NOTES=create alert plugin instance operation DELETE_ALERT_PLUGIN_INSTANCE_NOTES=delete alert plugin instance operation QUERY_ALERT_PLUGIN_INSTANCE_LIST_PAGING_NOTES=query alert plugin instance paging QUERY_TOPN_LONGEST_RUNNING_PROCESS_INSTANCE_NOTES=query topN longest running process instance ALERT_PLUGIN_INSTANCE_NAME=alert plugin instance name ALERT_PLUGIN_DEFINE_ID=alert plugin define id ALERT_PLUGIN_ID=alert plugin id ALERT_PLUGIN_INSTANCE_ID=alert plugin instance id ALERT_PLUGIN_INSTANCE_PARAMS=alert plugin instance parameters ALERT_INSTANCE_NAME=alert instance name VERIFY_ALERT_INSTANCE_NAME_NOTES=verify alert instance name DATA_SOURCE_PARAM=datasource parameter QUERY_ALL_ALERT_PLUGIN_INSTANCE_NOTES=query all alert plugin instances GET_ALERT_PLUGIN_INSTANCE_NOTES=get alert plugin instance operation CREATE_ALERT_GROUP_NOTES=create alert group WORKER_GROUP_TAG=worker group related operation SAVE_WORKER_GROUP_NOTES=create worker group WORKER_GROUP_NAME=worker group name WORKER_IP_LIST=worker ip list, eg. 
192.168.1.1,192.168.1.2 QUERY_WORKER_GROUP_PAGING_NOTES=query worker group paging QUERY_WORKER_GROUP_LIST_NOTES=query worker group list DELETE_WORKER_GROUP_BY_ID_NOTES=delete worker group by id DATA_ANALYSIS_TAG=analysis related operation of task state COUNT_TASK_STATE_NOTES=count task state COUNT_PROCESS_INSTANCE_NOTES=count process instance state COUNT_PROCESS_DEFINITION_BY_USER_NOTES=count process definition by user COUNT_COMMAND_STATE_NOTES=count command state COUNT_QUEUE_STATE_NOTES=count the running status of the task in the queue\ ACCESS_TOKEN_TAG=access token related operation MONITOR_TAG=monitor related operation MASTER_LIST_NOTES=master server list WORKER_LIST_NOTES=worker server list QUERY_DATABASE_STATE_NOTES=query database state QUERY_ZOOKEEPER_STATE_NOTES=QUERY ZOOKEEPER STATE TASK_STATE=task instance state SOURCE_TABLE=SOURCE TABLE DEST_TABLE=dest table TASK_DATE=task date QUERY_HISTORY_TASK_RECORD_LIST_PAGING_NOTES=query history task record list paging DATA_SOURCE_TAG=data source related operation CREATE_DATA_SOURCE_NOTES=create data source DATA_SOURCE_NAME=data source name DATA_SOURCE_NOTE=data source desc DB_TYPE=database type DATA_SOURCE_HOST=DATA SOURCE HOST DATA_SOURCE_PORT=data source port DATABASE_NAME=database name QUEUE_TAG=queue related operation QUERY_QUEUE_LIST_NOTES=query queue list QUERY_QUEUE_LIST_PAGING_NOTES=query queue list paging CREATE_QUEUE_NOTES=create queue YARN_QUEUE_NAME=yarn(hadoop) queue name QUEUE_ID=queue id TENANT_DESC=tenant desc QUERY_TENANT_LIST_PAGING_NOTES=query tenant list paging QUERY_TENANT_LIST_NOTES=query tenant list UPDATE_TENANT_NOTES=update tenant DELETE_TENANT_NOTES=delete tenant RESOURCES_TAG=resource center related operation CREATE_RESOURCE_NOTES=create resource RESOURCE_TYPE=resource file type RESOURCE_NAME=resource name RESOURCE_DESC=resource file desc RESOURCE_FILE=resource file RESOURCE_ID=resource id QUERY_RESOURCE_LIST_NOTES=query resource list DELETE_RESOURCE_BY_ID_NOTES=delete resource by id 
VIEW_RESOURCE_BY_ID_NOTES=view resource by id ONLINE_CREATE_RESOURCE_NOTES=online create resource SUFFIX=resource file suffix CONTENT=resource file content UPDATE_RESOURCE_NOTES=edit resource file online DOWNLOAD_RESOURCE_NOTES=download resource file CREATE_UDF_FUNCTION_NOTES=create udf function UDF_TYPE=UDF type FUNC_NAME=function name CLASS_NAME=package and class name ARG_TYPES=arguments UDF_DESC=udf desc VIEW_UDF_FUNCTION_NOTES=view udf function UPDATE_UDF_FUNCTION_NOTES=update udf function QUERY_UDF_FUNCTION_LIST_PAGING_NOTES=query udf function list paging VERIFY_UDF_FUNCTION_NAME_NOTES=verify udf function name DELETE_UDF_FUNCTION_NOTES=delete udf function AUTHORIZED_FILE_NOTES=authorized file UNAUTHORIZED_FILE_NOTES=unauthorized file AUTHORIZED_UDF_FUNC_NOTES=authorized udf func UNAUTHORIZED_UDF_FUNC_NOTES=unauthorized udf func VERIFY_QUEUE_NOTES=verify queue TENANT_TAG=tenant related operation CREATE_TENANT_NOTES=create tenant TENANT_CODE=os tenant code QUEUE_NAME=queue name PASSWORD=password DATA_SOURCE_OTHER=jdbc connection params, format:{"key1":"value1",...} DATA_SOURCE_PRINCIPAL=principal DATA_SOURCE_KERBEROS_KRB5_CONF=the kerberos authentication parameter java.security.krb5.conf DATA_SOURCE_KERBEROS_KEYTAB_USERNAME=the kerberos authentication parameter login.user.keytab.username DATA_SOURCE_KERBEROS_KEYTAB_PATH=the kerberos authentication parameter login.user.keytab.path PROJECT_TAG=project related operation CREATE_PROJECT_NOTES=create project PROJECT_DESC=project description UPDATE_PROJECT_NOTES=update project PROJECT_ID=project id QUERY_PROJECT_BY_ID_NOTES=query project info by project id QUERY_PROJECT_LIST_PAGING_NOTES=QUERY PROJECT LIST PAGING QUERY_ALL_PROJECT_LIST_NOTES=query all project list DELETE_PROJECT_BY_ID_NOTES=delete project by id QUERY_UNAUTHORIZED_PROJECT_NOTES=query unauthorized project QUERY_AUTHORIZED_PROJECT_NOTES=query authorized project TASK_RECORD_TAG=task record related operation QUERY_TASK_RECORD_LIST_PAGING_NOTES=query task 
record list paging CREATE_TOKEN_NOTES=create token ,note: please login first QUERY_ACCESS_TOKEN_LIST_NOTES=query access token list paging SCHEDULE=schedule WARNING_TYPE=warning type(sending strategy) WARNING_GROUP_ID=warning group id FAILURE_STRATEGY=failure strategy RECEIVERS=receivers RECEIVERS_CC=receivers cc WORKER_GROUP_ID=worker server group id PROCESS_INSTANCE_START_TIME=process instance start time PROCESS_INSTANCE_END_TIME=process instance end time PROCESS_INSTANCE_SIZE=process instance size PROCESS_INSTANCE_PRIORITY=process instance priority UPDATE_SCHEDULE_NOTES=update schedule SCHEDULE_ID=schedule id ONLINE_SCHEDULE_NOTES=online schedule OFFLINE_SCHEDULE_NOTES=offline schedule QUERY_SCHEDULE_NOTES=query schedule QUERY_SCHEDULE_LIST_PAGING_NOTES=query schedule list paging LOGIN_TAG=User login related operations USER_NAME=user name PROJECT_NAME=project name CREATE_PROCESS_DEFINITION_NOTES=create process definition PROCESS_DEFINITION_NAME=process definition name PROCESS_DEFINITION_JSON=process definition detail info (json format) PROCESS_DEFINITION_LOCATIONS=process definition node locations info (json format) PROCESS_INSTANCE_LOCATIONS=process instance node locations info (json format) PROCESS_DEFINITION_CONNECTS=process definition node connects info (json format) PROCESS_INSTANCE_CONNECTS=process instance node connects info (json format) PROCESS_DEFINITION_DESC=process definition desc PROCESS_DEFINITION_TAG=process definition related operation SIGNOUT_NOTES=logout USER_PASSWORD=user password UPDATE_PROCESS_INSTANCE_NOTES=update process instance QUERY_PROCESS_INSTANCE_LIST_NOTES=query process instance list VERIFY_PROCESS_DEFINITION_NAME_NOTES=verify process definition name LOGIN_NOTES=user login UPDATE_PROCESS_DEFINITION_NOTES=update process definition PROCESS_DEFINITION_ID=process definition id PROCESS_DEFINITION_IDS=process definition ids PROCESS_DEFINITION_CODE=process definition code PROCESS_DEFINITION_CODE_LIST=process definition code list 
IMPORT_PROCESS_DEFINITION_NOTES=import process definition RELEASE_PROCESS_DEFINITION_NOTES=release process definition QUERY_PROCESS_DEFINITION_BY_ID_NOTES=query process definition by id QUERY_PROCESS_DEFINITION_LIST_NOTES=query process definition list QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES=query process definition list paging QUERY_ALL_DEFINITION_LIST_NOTES=query all definition list PAGE_NO=page no PROCESS_INSTANCE_ID=process instance id PROCESS_INSTANCE_JSON=process instance info(json format) SCHEDULE_TIME=schedule time SYNC_DEFINE=update the information of the process instance to the process definition RECOVERY_PROCESS_INSTANCE_FLAG=whether to recovery process instance PREVIEW_SCHEDULE_NOTES=preview schedule SEARCH_VAL=search val USER_ID=user id FORCE_TASK_SUCCESS=force task success QUERY_TASK_INSTANCE_LIST_PAGING_NOTES=query task instance list paging PROCESS_INSTANCE_NAME=process instance name TASK_INSTANCE_ID=task instance id VERIFY_TENANT_CODE_NOTES=verify tenant code QUERY_UI_PLUGIN_DETAIL_BY_ID=query ui plugin detail by id PLUGIN_ID=plugin id QUERY_UI_PLUGINS_BY_TYPE=query ui plugins by type ACTIVATE_USER_NOTES=active user BATCH_ACTIVATE_USER_NOTES=batch active user STATE=state REPEAT_PASSWORD=repeat password REGISTER_USER_NOTES=register user USER_NAMES=user names PAGE_SIZE=page size LIMIT=limit CREATE_WORKER_GROUP_NOTES=create worker group WORKER_ADDR_LIST=worker address list QUERY_WORKER_ADDRESS_LIST_NOTES=query worker address list QUERY_WORKFLOW_LINEAGE_BY_IDS_NOTES=query workflow lineage by ids QUERY_WORKFLOW_LINEAGE_BY_NAME_NOTES=query workflow lineage by name VIEW_TREE_NOTES=view tree UDF_ID=udf id GET_NODE_LIST_BY_DEFINITION_ID_NOTES=get task node list by process definition id GET_NODE_LIST_BY_DEFINITION_CODE_NOTES=get node list by definition code QUERY_PROCESS_DEFINITION_BY_NAME_NOTES=query process definition by name PROCESS_DEFINITION_ID_LIST=process definition id list QUERY_PROCESS_DEFINITION_All_BY_PROJECT_ID_NOTES=query process definition all 
by project id DELETE_PROCESS_DEFINITION_BY_ID_NOTES=delete process definition by process definition id BATCH_DELETE_PROCESS_DEFINITION_BY_IDS_NOTES=batch delete process definition by process definition ids BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_NOTES=batch delete process instance by process ids QUERY_PROCESS_INSTANCE_BY_ID_NOTES=query process instance by process instance id DELETE_PROCESS_INSTANCE_BY_ID_NOTES=delete process instance by process instance id TASK_ID=task instance id PROCESS_INSTANCE_IDS=process_instance ids SKIP_LINE_NUM=skip line num QUERY_TASK_INSTANCE_LOG_NOTES=query task instance log DOWNLOAD_TASK_INSTANCE_LOG_NOTES=download task instance log USERS_TAG=users related operation SCHEDULER_TAG=scheduler related operation CREATE_SCHEDULE_NOTES=create schedule CREATE_USER_NOTES=create user TENANT_ID=tenant id QUEUE=queue EMAIL=email PHONE=phone QUERY_USER_LIST_NOTES=query user list UPDATE_USER_NOTES=update user UPDATE_QUEUE_NOTES=update queue DELETE_USER_BY_ID_NOTES=delete user by id GRANT_PROJECT_NOTES=GRANT PROJECT PROJECT_IDS=project ids(string format, multiple projects separated by ",") GRANT_RESOURCE_NOTES=grant resource file RESOURCE_IDS=resource ids(string format, multiple resources separated by ",") GET_USER_INFO_NOTES=get user info LIST_USER_NOTES=list user VERIFY_USER_NAME_NOTES=verify user name UNAUTHORIZED_USER_NOTES=cancel authorization ALERT_GROUP_ID=alert group id AUTHORIZED_USER_NOTES=authorized user AUTHORIZE_RESOURCE_TREE_NOTES=authorize resource tree RESOURCE_CURRENTDIR=dir of the current resource QUERY_RESOURCE_LIST_PAGING_NOTES=query resource list paging RESOURCE_PID=parent directory ID of the current resource RESOURCE_FULL_NAME=resource full name QUERY_BY_RESOURCE_NAME=query by resource name QUERY_UDF_FUNC_LIST_NOTES=query udf funciton list VERIFY_RESOURCE_NAME_NOTES=verify resource name GRANT_UDF_FUNC_NOTES=grant udf function UDF_IDS=udf ids(string format, multiple udf functions separated by ",") GRANT_DATASOURCE_NOTES=grant 
datasource DATASOURCE_IDS=datasource ids(string format, multiple datasources separated by ",") QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES=query subprocess instance by task instance id QUERY_PARENT_PROCESS_INSTANCE_BY_SUB_PROCESS_INSTANCE_ID_NOTES=query parent process instance info by sub process instance id QUERY_PROCESS_INSTANCE_GLOBAL_VARIABLES_AND_LOCAL_VARIABLES_NOTES=query process instance global variables and local variables VIEW_GANTT_NOTES=view gantt SUB_PROCESS_INSTANCE_ID=sub process instance id TASK_NAME=task instance name TASK_INSTANCE_TAG=task instance related operation LOGGER_TAG=log related operation PROCESS_INSTANCE_TAG=process instance related operation EXECUTION_STATUS=runing status for workflow and task nodes HOST=ip address of running task START_DATE=start date END_DATE=end date QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_NOTES=query task list by process instance id UPDATE_DATA_SOURCE_NOTES=update data source DATA_SOURCE_ID=DATA SOURCE ID QUERY_DATA_SOURCE_NOTES=query data source by id QUERY_DATA_SOURCE_LIST_BY_TYPE_NOTES=query data source list by database type QUERY_DATA_SOURCE_LIST_PAGING_NOTES=query data source list paging CONNECT_DATA_SOURCE_NOTES=CONNECT DATA SOURCE CONNECT_DATA_SOURCE_TEST_NOTES=connect data source test DELETE_DATA_SOURCE_NOTES=delete data source VERIFY_DATA_SOURCE_NOTES=verify data source UNAUTHORIZED_DATA_SOURCE_NOTES=unauthorized data source AUTHORIZED_DATA_SOURCE_NOTES=authorized data source DELETE_SCHEDULER_BY_ID_NOTES=delete scheduler by id QUERY_ALERT_GROUP_LIST_PAGING_NOTES=query alert group list paging EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=export process definition by id BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES=batch export process definition by ids QUERY_USER_CREATED_PROJECT_NOTES=query user created project QUERY_AUTHORIZED_AND_USER_CREATED_PROJECT_NOTES=query authorized and user created project COPY_PROCESS_DEFINITION_NOTES=copy process definition notes MOVE_PROCESS_DEFINITION_NOTES=move process definition notes 
TARGET_PROJECT_ID=target project id IS_COPY=is copy DELETE_PROCESS_DEFINITION_VERSION_NOTES=delete process definition version QUERY_PROCESS_DEFINITION_VERSIONS_NOTES=query process definition versions SWITCH_PROCESS_DEFINITION_VERSION_NOTES=switch process definition version VERSION=version
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,385
[Improvement][recording data] Manually configure the parallelism of supplementary when recording data
*i met a situation that the memory fulls and the dolphinscheduler server down when i start the recording data(补数) tasks, if there is a feature that we can configure the parallelism of supplementary recording data manually * **Describe the question** The memory fulls and the dolphinscheduler server down when i start the recording data(补数) tasks. **What are the current deficiencies and the benefits of improvement** We can configure the parallelism of supplementary recording data manually so that the machine resource will not be occupied too much. **Which version of DolphinScheduler:** -[1.3.3-preview] **Describe alternatives you've considered** Changing the model to "Serial"(串行) but this model will take too much time.
https://github.com/apache/dolphinscheduler/issues/4385
https://github.com/apache/dolphinscheduler/pull/5912
2afa625a753680313e8a6c5fb3a68e01e56f5caa
1887bde1ebf8249de153890e78d449582e3eaf4a
"2021-01-06T02:58:47Z"
java
"2021-08-16T06:59:28Z"
dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # QUERY_SCHEDULE_LIST_NOTES=查询定时列表 PROCESS_INSTANCE_EXECUTOR_TAG=流程实例执行相关操作 UI_PLUGINS_TAG=UI插件相关操作 WORK_FLOW_LINEAGE_TAG=工作流血缘相关操作 RUN_PROCESS_INSTANCE_NOTES=运行流程实例 START_NODE_LIST=开始节点列表(节点name) TASK_DEPEND_TYPE=任务依赖类型 COMMAND_TYPE=指令类型 RUN_MODE=运行模式 TIMEOUT=超时时间 EXECUTE_ACTION_TO_PROCESS_INSTANCE_NOTES=执行流程实例的各种操作(暂停、停止、重跑、恢复等) EXECUTE_TYPE=执行类型 EXECUTOR_TAG=流程相关操作 EXECUTOR_NAME=流程名称 START_CHECK_PROCESS_DEFINITION_NOTES=检查流程定义 DESC=备注(描述) GROUP_NAME=组名称 WORKER_GROUP=worker群组 startParams=启动参数 GROUP_TYPE=组类型 QUERY_ALERT_GROUP_LIST_NOTES=告警组列表 UPDATE_ALERT_GROUP_NOTES=编辑(更新)告警组 DELETE_ALERT_GROUP_BY_ID_NOTES=通过ID删除告警组 VERIFY_ALERT_GROUP_NAME_NOTES=检查告警组是否存在 GRANT_ALERT_GROUP_NOTES=授权告警组 PROCESS_DEFINITION_IDS=流程定义ID PROCESS_DEFINITION_CODE=流程定义编码 PROCESS_DEFINITION_CODE_LIST=流程定义编码列表 USER_IDS=用户ID列表 ALERT_GROUP_TAG=告警组相关操作 WORKER_GROUP_TAG=Worker分组管理 SAVE_WORKER_GROUP_NOTES=创建Worker分组 ALERT_PLUGIN_INSTANCE_TAG=告警插件实例相关操作 WORKER_GROUP_NAME=Worker分组名称 WORKER_IP_LIST=Worker ip列表,注意:多个IP地址以逗号分割 QUERY_WORKER_GROUP_PAGING_NOTES=Worker分组管理 QUERY_WORKER_GROUP_LIST_NOTES=查询worker group分组 DELETE_WORKER_GROUP_BY_ID_NOTES=通过ID删除worker group DATA_ANALYSIS_TAG=任务状态分析相关操作 COUNT_TASK_STATE_NOTES=任务状态统计 
COUNT_PROCESS_INSTANCE_NOTES=统计流程实例状态 COUNT_PROCESS_DEFINITION_BY_USER_NOTES=统计用户创建的流程定义 COUNT_COMMAND_STATE_NOTES=统计命令状态 COUNT_QUEUE_STATE_NOTES=统计队列里任务状态 ACCESS_TOKEN_TAG=访问token相关操作 MONITOR_TAG=监控相关操作 MASTER_LIST_NOTES=master服务列表 WORKER_LIST_NOTES=worker服务列表 QUERY_DATABASE_STATE_NOTES=查询数据库状态 QUERY_ZOOKEEPER_STATE_NOTES=查询Zookeeper状态 TASK_STATE=任务实例状态 SOURCE_TABLE=源表 DEST_TABLE=目标表 TASK_DATE=任务时间 QUERY_HISTORY_TASK_RECORD_LIST_PAGING_NOTES=分页查询历史任务记录列表 DATA_SOURCE_TAG=数据源相关操作 CREATE_DATA_SOURCE_NOTES=创建数据源 DATA_SOURCE_NAME=数据源名称 DATA_SOURCE_NOTE=数据源描述 DB_TYPE=数据源类型 DATA_SOURCE_HOST=IP主机名 DATA_SOURCE_PORT=数据源端口 DATABASE_NAME=数据库名 QUEUE_TAG=队列相关操作 QUERY_TOPN_LONGEST_RUNNING_PROCESS_INSTANCE_NOTES=查询topN最长运行流程实例 QUERY_QUEUE_LIST_NOTES=查询队列列表 QUERY_QUEUE_LIST_PAGING_NOTES=分页查询队列列表 CREATE_QUEUE_NOTES=创建队列 YARN_QUEUE_NAME=hadoop yarn队列名 QUEUE_ID=队列ID TENANT_DESC=租户描述 QUERY_TENANT_LIST_PAGING_NOTES=分页查询租户列表 QUERY_TENANT_LIST_NOTES=查询租户列表 UPDATE_TENANT_NOTES=更新租户 DELETE_TENANT_NOTES=删除租户 RESOURCES_TAG=资源中心相关操作 CREATE_RESOURCE_NOTES=创建资源 RESOURCE_FULL_NAME=资源全名 RESOURCE_TYPE=资源文件类型 RESOURCE_NAME=资源文件名称 RESOURCE_DESC=资源文件描述 RESOURCE_FILE=资源文件 RESOURCE_ID=资源ID QUERY_RESOURCE_LIST_NOTES=查询资源列表 QUERY_BY_RESOURCE_NAME=通过资源名称查询 QUERY_UDF_FUNC_LIST_NOTES=查询UDF函数列表 VERIFY_RESOURCE_NAME_NOTES=验证资源名称 DELETE_RESOURCE_BY_ID_NOTES=通过ID删除资源 VIEW_RESOURCE_BY_ID_NOTES=通过ID浏览资源 ONLINE_CREATE_RESOURCE_NOTES=在线创建资源 SUFFIX=资源文件后缀 CONTENT=资源文件内容 UPDATE_RESOURCE_NOTES=在线更新资源文件 DOWNLOAD_RESOURCE_NOTES=下载资源文件 CREATE_UDF_FUNCTION_NOTES=创建UDF函数 UDF_TYPE=UDF类型 FUNC_NAME=函数名称 CLASS_NAME=包名类名 ARG_TYPES=参数 UDF_DESC=udf描述,使用说明 VIEW_UDF_FUNCTION_NOTES=查看udf函数 UPDATE_UDF_FUNCTION_NOTES=更新udf函数 QUERY_UDF_FUNCTION_LIST_PAGING_NOTES=分页查询udf函数列表 VERIFY_UDF_FUNCTION_NAME_NOTES=验证udf函数名 DELETE_UDF_FUNCTION_NOTES=删除UDF函数 AUTHORIZED_FILE_NOTES=授权文件 UNAUTHORIZED_FILE_NOTES=取消授权文件 AUTHORIZED_UDF_FUNC_NOTES=授权udf函数 UNAUTHORIZED_UDF_FUNC_NOTES=取消udf函数授权 VERIFY_QUEUE_NOTES=验证队列 TENANT_TAG=租户相关操作 
CREATE_TENANT_NOTES=创建租户 TENANT_CODE=操作系统租户 QUEUE_NAME=队列名 PASSWORD=密码 DATA_SOURCE_OTHER=jdbc连接参数,格式为:{"key1":"value1",...} DATA_SOURCE_PRINCIPAL=principal DATA_SOURCE_KERBEROS_KRB5_CONF=kerberos认证参数 java.security.krb5.conf DATA_SOURCE_KERBEROS_KEYTAB_USERNAME=kerberos认证参数 login.user.keytab.username DATA_SOURCE_KERBEROS_KEYTAB_PATH=kerberos认证参数 login.user.keytab.path PROJECT_TAG=项目相关操作 CREATE_PROJECT_NOTES=创建项目 PROJECT_DESC=项目描述 UPDATE_PROJECT_NOTES=更新项目 PROJECT_ID=项目ID QUERY_PROJECT_BY_ID_NOTES=通过项目ID查询项目信息 QUERY_PROJECT_LIST_PAGING_NOTES=分页查询项目列表 QUERY_ALL_PROJECT_LIST_NOTES=查询所有项目 DELETE_PROJECT_BY_ID_NOTES=通过ID删除项目 QUERY_UNAUTHORIZED_PROJECT_NOTES=查询未授权的项目 QUERY_AUTHORIZED_PROJECT_NOTES=查询授权项目 TASK_RECORD_TAG=任务记录相关操作 QUERY_TASK_RECORD_LIST_PAGING_NOTES=分页查询任务记录列表 CREATE_TOKEN_NOTES=创建token,注意需要先登录 QUERY_ACCESS_TOKEN_LIST_NOTES=分页查询access token列表 SCHEDULE=定时 WARNING_TYPE=发送策略 WARNING_GROUP_ID=发送组ID FAILURE_STRATEGY=失败策略 RECEIVERS=收件人 RECEIVERS_CC=收件人(抄送) WORKER_GROUP_ID=Worker Server分组ID PROCESS_INSTANCE_PRIORITY=流程实例优先级 UPDATE_SCHEDULE_NOTES=更新定时 SCHEDULE_ID=定时ID ONLINE_SCHEDULE_NOTES=定时上线 OFFLINE_SCHEDULE_NOTES=定时下线 QUERY_SCHEDULE_NOTES=查询定时 QUERY_SCHEDULE_LIST_PAGING_NOTES=分页查询定时 LOGIN_TAG=用户登录相关操作 USER_NAME=用户名 PROJECT_NAME=项目名称 CREATE_PROCESS_DEFINITION_NOTES=创建流程定义 PROCESS_INSTANCE_START_TIME=流程实例启动时间 PROCESS_INSTANCE_END_TIME=流程实例结束时间 PROCESS_INSTANCE_SIZE=流程实例个数 PROCESS_DEFINITION_NAME=流程定义名称 PROCESS_DEFINITION_JSON=流程定义详细信息(json格式) PROCESS_DEFINITION_LOCATIONS=流程定义节点坐标位置信息(json格式) PROCESS_INSTANCE_LOCATIONS=流程实例节点坐标位置信息(json格式) PROCESS_DEFINITION_CONNECTS=流程定义节点图标连接信息(json格式) PROCESS_INSTANCE_CONNECTS=流程实例节点图标连接信息(json格式) PROCESS_DEFINITION_DESC=流程定义描述信息 PROCESS_DEFINITION_TAG=流程定义相关操作 SIGNOUT_NOTES=退出登录 USER_PASSWORD=用户密码 UPDATE_PROCESS_INSTANCE_NOTES=更新流程实例 QUERY_PROCESS_INSTANCE_LIST_NOTES=查询流程实例列表 VERIFY_PROCESS_DEFINITION_NAME_NOTES=验证流程定义名字 LOGIN_NOTES=用户登录 UPDATE_PROCESS_DEFINITION_NOTES=更新流程定义 PROCESS_DEFINITION_ID=流程定义ID 
RELEASE_PROCESS_DEFINITION_NOTES=发布流程定义 QUERY_PROCESS_DEFINITION_BY_ID_NOTES=通过流程定义ID查询流程定义 QUERY_PROCESS_DEFINITION_LIST_NOTES=查询流程定义列表 QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES=分页查询流程定义列表 QUERY_ALL_DEFINITION_LIST_NOTES=查询所有流程定义 PAGE_NO=页码号 PROCESS_INSTANCE_ID=流程实例ID PROCESS_INSTANCE_IDS=流程实例ID集合 PROCESS_INSTANCE_JSON=流程实例信息(json格式) PREVIEW_SCHEDULE_NOTES=定时调度预览 SCHEDULE_TIME=定时时间 SYNC_DEFINE=更新流程实例的信息是否同步到流程定义 RECOVERY_PROCESS_INSTANCE_FLAG=是否恢复流程实例 SEARCH_VAL=搜索值 FORCE_TASK_SUCCESS=强制TASK成功 QUERY_TASK_INSTANCE_LIST_PAGING_NOTES=分页查询任务实例列表 PROCESS_INSTANCE_NAME=流程实例名称 TASK_INSTANCE_ID=任务实例ID VERIFY_TENANT_CODE_NOTES=验证租户 QUERY_UI_PLUGIN_DETAIL_BY_ID=通过ID查询UI插件详情 QUERY_UI_PLUGINS_BY_TYPE=通过类型查询UI插件 ACTIVATE_USER_NOTES=激活用户 BATCH_ACTIVATE_USER_NOTES=批量激活用户 REPEAT_PASSWORD=重复密码 REGISTER_USER_NOTES=用户注册 STATE=状态 USER_NAMES=多个用户名 PLUGIN_ID=插件ID USER_ID=用户ID PAGE_SIZE=页大小 LIMIT=显示多少条 UDF_ID=udf ID AUTHORIZE_RESOURCE_TREE_NOTES=授权资源树 RESOURCE_CURRENTDIR=当前资源目录 RESOURCE_PID=资源父目录ID QUERY_RESOURCE_LIST_PAGING_NOTES=分页查询资源列表 VIEW_TREE_NOTES=树状图 IMPORT_PROCESS_DEFINITION_NOTES=导入流程定义 GET_NODE_LIST_BY_DEFINITION_ID_NOTES=通过流程定义ID获得任务节点列表 PROCESS_DEFINITION_ID_LIST=流程定义id列表 QUERY_PROCESS_DEFINITION_All_BY_PROJECT_ID_NOTES=通过项目ID查询流程定义 BATCH_DELETE_PROCESS_DEFINITION_BY_IDS_NOTES=通过流程定义ID集合批量删除流程定义 BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_NOTES=通过流程实例ID集合批量删除流程实例 DELETE_PROCESS_DEFINITION_BY_ID_NOTES=通过流程定义ID删除流程定义 QUERY_PROCESS_INSTANCE_BY_ID_NOTES=通过流程实例ID查询流程实例 DELETE_PROCESS_INSTANCE_BY_ID_NOTES=通过流程实例ID删除流程实例 TASK_ID=任务实例ID SKIP_LINE_NUM=忽略行数 QUERY_TASK_INSTANCE_LOG_NOTES=查询任务实例日志 DOWNLOAD_TASK_INSTANCE_LOG_NOTES=下载任务实例日志 USERS_TAG=用户相关操作 SCHEDULER_TAG=定时相关操作 CREATE_SCHEDULE_NOTES=创建定时 CREATE_USER_NOTES=创建用户 CREATE_WORKER_GROUP_NOTES=创建Worker分组 WORKER_ADDR_LIST=worker地址列表 QUERY_WORKER_ADDRESS_LIST_NOTES=查询worker地址列表 QUERY_WORKFLOW_LINEAGE_BY_IDS_NOTES=通过IDs查询工作流血缘列表 QUERY_WORKFLOW_LINEAGE_BY_NAME_NOTES=通过名称查询工作流血缘列表 TENANT_ID=租户ID QUEUE=使用的队列 EMAIL=邮箱 PHONE=手机号 
QUERY_USER_LIST_NOTES=查询用户列表 UPDATE_USER_NOTES=更新用户 UPDATE_QUEUE_NOTES=更新队列 DELETE_USER_BY_ID_NOTES=删除用户通过ID GRANT_PROJECT_NOTES=授权项目 PROJECT_IDS=项目IDS(字符串格式,多个项目以","分割) GRANT_RESOURCE_NOTES=授权资源文件 RESOURCE_IDS=资源ID列表(字符串格式,多个资源ID以","分割) GET_USER_INFO_NOTES=获取用户信息 GET_NODE_LIST_BY_DEFINITION_CODE_NOTES=通过流程定义编码查询节点列表 QUERY_PROCESS_DEFINITION_BY_NAME_NOTES=通过名称查询流程定义 LIST_USER_NOTES=用户列表 VERIFY_USER_NAME_NOTES=验证用户名 UNAUTHORIZED_USER_NOTES=取消授权 ALERT_GROUP_ID=报警组ID AUTHORIZED_USER_NOTES=授权用户 GRANT_UDF_FUNC_NOTES=授权udf函数 UDF_IDS=udf函数id列表(字符串格式,多个udf函数ID以","分割) GRANT_DATASOURCE_NOTES=授权数据源 DATASOURCE_IDS=数据源ID列表(字符串格式,多个数据源ID以","分割) QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES=通过任务实例ID查询子流程实例 QUERY_PARENT_PROCESS_INSTANCE_BY_SUB_PROCESS_INSTANCE_ID_NOTES=通过子流程实例ID查询父流程实例信息 QUERY_PROCESS_INSTANCE_GLOBAL_VARIABLES_AND_LOCAL_VARIABLES_NOTES=查询流程实例全局变量和局部变量 VIEW_GANTT_NOTES=浏览Gantt图 SUB_PROCESS_INSTANCE_ID=子流程实例ID TASK_NAME=任务实例名 TASK_INSTANCE_TAG=任务实例相关操作 LOGGER_TAG=日志相关操作 PROCESS_INSTANCE_TAG=流程实例相关操作 EXECUTION_STATUS=工作流和任务节点的运行状态 HOST=运行任务的主机IP地址 START_DATE=开始时间 END_DATE=结束时间 QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_NOTES=通过流程实例ID查询任务列表 DELETE_ALERT_PLUGIN_INSTANCE_NOTES=删除告警插件实例 CREATE_ALERT_PLUGIN_INSTANCE_NOTES=创建告警插件实例 GET_ALERT_PLUGIN_INSTANCE_NOTES=查询告警插件实例 QUERY_ALERT_PLUGIN_INSTANCE_LIST_PAGING_NOTES=分页查询告警实例列表 QUERY_ALL_ALERT_PLUGIN_INSTANCE_NOTES=查询所有告警实例列表 UPDATE_ALERT_PLUGIN_INSTANCE_NOTES=更新告警插件实例 ALERT_PLUGIN_INSTANCE_NAME=告警插件实例名称 ALERT_PLUGIN_DEFINE_ID=告警插件定义ID ALERT_PLUGIN_ID=告警插件ID ALERT_PLUGIN_INSTANCE_ID=告警插件实例ID ALERT_PLUGIN_INSTANCE_PARAMS=告警插件实例参数 ALERT_INSTANCE_NAME=告警插件名称 VERIFY_ALERT_INSTANCE_NAME_NOTES=验证告警插件名称 UPDATE_DATA_SOURCE_NOTES=更新数据源 DATA_SOURCE_PARAM=数据源参数 DATA_SOURCE_ID=数据源ID CREATE_ALERT_GROUP_NOTES=创建告警组 QUERY_DATA_SOURCE_NOTES=查询数据源通过ID QUERY_DATA_SOURCE_LIST_BY_TYPE_NOTES=通过数据源类型查询数据源列表 QUERY_DATA_SOURCE_LIST_PAGING_NOTES=分页查询数据源列表 CONNECT_DATA_SOURCE_NOTES=连接数据源 CONNECT_DATA_SOURCE_TEST_NOTES=连接数据源测试 
DELETE_DATA_SOURCE_NOTES=删除数据源 VERIFY_DATA_SOURCE_NOTES=验证数据源 UNAUTHORIZED_DATA_SOURCE_NOTES=未授权的数据源 AUTHORIZED_DATA_SOURCE_NOTES=授权的数据源 DELETE_SCHEDULER_BY_ID_NOTES=根据定时id删除定时数据 QUERY_ALERT_GROUP_LIST_PAGING_NOTES=分页查询告警组列表 EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=通过工作流ID导出工作流定义 BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES=批量导出工作流定义 QUERY_USER_CREATED_PROJECT_NOTES=查询用户创建的项目 QUERY_AUTHORIZED_AND_USER_CREATED_PROJECT_NOTES=查询授权和用户创建的项目 COPY_PROCESS_DEFINITION_NOTES=复制工作流定义 MOVE_PROCESS_DEFINITION_NOTES=移动工作流定义 TARGET_PROJECT_ID=目标项目ID IS_COPY=是否复制 DELETE_PROCESS_DEFINITION_VERSION_NOTES=删除流程历史版本 QUERY_PROCESS_DEFINITION_VERSIONS_NOTES=查询流程历史版本信息 SWITCH_PROCESS_DEFINITION_VERSION_NOTES=切换流程版本 VERSION=版本号
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,385
[Improvement][recording data] Manually configure the parallelism of supplementary when recording data
*i met a situation that the memory fulls and the dolphinscheduler server down when i start the recording data(补数) tasks, if there is a feature that we can configure the parallelism of supplementary recording data manually * **Describe the question** The memory fulls and the dolphinscheduler server down when i start the recording data(补数) tasks. **What are the current deficiencies and the benefits of improvement** We can configure the parallelism of supplementary recording data manually so that the machine resource will not be occupied too much. **Which version of DolphinScheduler:** -[1.3.3-preview] **Describe alternatives you've considered** Changing the model to "Serial"(串行) but this model will take too much time.
https://github.com/apache/dolphinscheduler/issues/4385
https://github.com/apache/dolphinscheduler/pull/5912
2afa625a753680313e8a6c5fb3a68e01e56f5caa
1887bde1ebf8249de153890e78d449582e3eaf4a
"2021-01-06T02:58:47Z"
java
"2021-08-16T06:59:28Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import org.apache.dolphinscheduler.api.enums.ExecuteType; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.impl.ExecutorServiceImpl; import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.RunMode; import org.apache.dolphinscheduler.common.model.Server; import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.dao.entity.Tenant; import 
org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.service.process.ProcessService; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; /** * executor service 2 test */ @RunWith(MockitoJUnitRunner.Silent.class) public class ExecutorService2Test { @InjectMocks private ExecutorServiceImpl executorService; @Mock private ProcessService processService; @Mock private ProcessDefinitionMapper processDefinitionMapper; @Mock private ProjectMapper projectMapper; @Mock private ProjectServiceImpl projectService; @Mock private MonitorService monitorService; private int processDefinitionId = 1; private int processInstanceId = 1; private int tenantId = 1; private int userId = 1; private ProcessDefinition processDefinition = new ProcessDefinition(); private ProcessInstance processInstance = new ProcessInstance(); private User loginUser = new User(); private String projectName = "projectName"; private Project project = new Project(); private String cronTime; @Before public void init() { // user loginUser.setId(userId); // processDefinition processDefinition.setId(processDefinitionId); processDefinition.setReleaseState(ReleaseState.ONLINE); processDefinition.setTenantId(tenantId); processDefinition.setUserId(userId); processDefinition.setVersion(1); processDefinition.setCode(1L); // processInstance processInstance.setId(processInstanceId); processInstance.setState(ExecutionStatus.FAILURE); processInstance.setExecutorId(userId); processInstance.setTenantId(tenantId); 
processInstance.setProcessDefinitionVersion(1); processInstance.setProcessDefinitionCode(1L); // project project.setName(projectName); // cronRangeTime cronTime = "2020-01-01 00:00:00,2020-01-31 23:00:00"; // mock Mockito.when(projectMapper.queryByName(projectName)).thenReturn(project); Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(checkProjectAndAuth()); Mockito.when(processDefinitionMapper.selectById(processDefinitionId)).thenReturn(processDefinition); Mockito.when(processService.getTenantForProcess(tenantId, userId)).thenReturn(new Tenant()); Mockito.when(processService.createCommand(any(Command.class))).thenReturn(1); Mockito.when(monitorService.getServerListFromRegistry(true)).thenReturn(getMasterServersList()); Mockito.when(processService.findProcessInstanceDetailById(processInstanceId)).thenReturn(processInstance); Mockito.when(processService.findProcessDefinition(1L, 1)).thenReturn(processDefinition); } /** * not complement */ @Test public void testNoComplement() { Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName, processDefinitionId, cronTime, CommandType.START_PROCESS, null, null, null, null, 0, RunMode.RUN_MODE_SERIAL, Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110, null); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); verify(processService, times(1)).createCommand(any(Command.class)); } /** * not complement */ @Test public void testComplementWithStartNodeList() { Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName, processDefinitionId, cronTime, CommandType.START_PROCESS, null, "n1,n2", null, null, 0, RunMode.RUN_MODE_SERIAL, Priority.LOW, 
Constants.DEFAULT_WORKER_GROUP, 110, null); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); verify(processService, times(1)).createCommand(any(Command.class)); } /** * date error */ @Test public void testDateError() { Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName, processDefinitionId, "2020-01-31 23:00:00,2020-01-01 00:00:00", CommandType.COMPLEMENT_DATA, null, null, null, null, 0, RunMode.RUN_MODE_SERIAL, Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110, null); Assert.assertEquals(Status.START_PROCESS_INSTANCE_ERROR, result.get(Constants.STATUS)); verify(processService, times(0)).createCommand(any(Command.class)); } /** * serial */ @Test public void testSerial() { Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName, processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA, null, null, null, null, 0, RunMode.RUN_MODE_SERIAL, Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110, null); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); verify(processService, times(1)).createCommand(any(Command.class)); } /** * without schedule */ @Test public void testParallelWithOutSchedule() { Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName, processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA, null, null, null, null, 0, RunMode.RUN_MODE_PARALLEL, Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110, null); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); verify(processService, times(31)).createCommand(any(Command.class)); } 
/** * with schedule */ @Test public void testParallelWithSchedule() { Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(oneSchedulerList()); Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName, processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA, null, null, null, null, 0, RunMode.RUN_MODE_PARALLEL, Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110, null); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); verify(processService, times(15)).createCommand(any(Command.class)); } @Test public void testNoMsterServers() { Mockito.when(monitorService.getServerListFromRegistry(true)).thenReturn(new ArrayList<>()); Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName, processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA, null, null, null, null, 0, RunMode.RUN_MODE_PARALLEL, Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110, null); Assert.assertEquals(result.get(Constants.STATUS), Status.MASTER_NOT_EXISTS); } @Test public void testExecuteRepeatRunning() { Mockito.when(processService.verifyIsNeedCreateCommand(any(Command.class))).thenReturn(true); Map<String, Object> result = executorService.execute(loginUser, projectName, processInstanceId, ExecuteType.REPEAT_RUNNING); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } private List<Server> getMasterServersList() { List<Server> masterServerList = new ArrayList<>(); Server masterServer1 = new Server(); masterServer1.setId(1); masterServer1.setHost("192.168.220.188"); masterServer1.setPort(1121); masterServerList.add(masterServer1); Server masterServer2 = new Server(); masterServer2.setId(2); masterServer2.setHost("192.168.220.189"); masterServer2.setPort(1122); masterServerList.add(masterServer2); return masterServerList; } private List zeroSchedulerList() { return Collections.EMPTY_LIST; } private List<Schedule> oneSchedulerList() { List<Schedule> 
schedulerList = new LinkedList<>(); Schedule schedule = new Schedule(); schedule.setCrontab("0 0 0 1/2 * ?"); schedulerList.add(schedule); return schedulerList; } private Map<String, Object> checkProjectAndAuth() { Map<String, Object> result = new HashMap<>(); result.put(Constants.STATUS, Status.SUCCESS); return result; } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,385
[Improvement][recording data] Manually configure the parallelism of supplementary when recording data
*i met a situation that the memory fulls and the dolphinscheduler server down when i start the recording data(补数) tasks, if there is a feature that we can configure the parallelism of supplementary recording data manually * **Describe the question** The memory fulls and the dolphinscheduler server down when i start the recording data(补数) tasks. **What are the current deficiencies and the benefits of improvement** We can configure the parallelism of supplementary recording data manually so that the machine resource will not be occupied too much. **Which version of DolphinScheduler:** -[1.3.3-preview] **Describe alternatives you've considered** Changing the model to "Serial"(串行) but this model will take too much time.
https://github.com/apache/dolphinscheduler/issues/4385
https://github.com/apache/dolphinscheduler/pull/5912
2afa625a753680313e8a6c5fb3a68e01e56f5caa
1887bde1ebf8249de153890e78d449582e3eaf4a
"2021-01-06T02:58:47Z"
java
"2021-08-16T06:59:28Z"
dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/start.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <div class="start-process-model"> <div class="clearfix list"> <div class="text"> {{$t('Process Name')}} </div> <div style="line-height: 32px;">{{workflowName}}</div> </div> <div class="clearfix list"> <div class="text"> {{$t('Failure Strategy')}} </div> <div class="cont"> <el-radio-group v-model="failureStrategy" style="margin-top: 7px;" size="small"> <el-radio :label="'CONTINUE'">{{$t('Continue')}}</el-radio> <el-radio :label="'END'">{{$t('End')}}</el-radio> </el-radio-group> </div> </div> <div class="clearfix list" v-if="sourceType === 'contextmenu'" style="margin-top: -8px;"> <div class="text"> {{$t('Node execution')}} </div> <div class="cont" style="padding-top: 6px;"> <el-radio-group v-model="taskDependType" size="small"> <el-radio :label="'TASK_POST'">{{$t('Backward execution')}}</el-radio> <el-radio :label="'TASK_PRE'">{{$t('Forward execution')}}</el-radio> <el-radio :label="'TASK_ONLY'">{{$t('Execute only the current node')}}</el-radio> </el-radio-group> </div> </div> <div class="clearfix list"> <div class="text"> {{$t('Notification strategy')}} </div> <div class="cont"> <el-select style="width: 200px;" v-model="warningType" size="small"> <el-option v-for="city in warningTypeList" 
:key="city.id" :value="city.id" :label="city.code"> </el-option> </el-select> </div> </div> <div class="clearfix list"> <div class="text"> {{$t('Process priority')}} </div> <div class="cont"> <m-priority v-model="processInstancePriority"></m-priority> </div> </div> <div class="clearfix list"> <div class="text"> {{$t('Worker group')}} </div> <div class="cont"> <m-worker-groups v-model="workerGroup"></m-worker-groups> </div> </div> <div class="clearfix list"> <div class="text"> {{$t('Alarm group')}} </div> <div class="cont"> <el-select style="width: 200px;" clearable size="small" v-model="warningGroupId" :disabled="!notifyGroupList.length"> <el-input slot="trigger" slot-scope="{ selectedModel }" readonly size="small" :value="selectedModel ? selectedModel.label : ''" style="width: 200px;" @on-click-icon.stop="warningGroupId = ''"> <em slot="suffix" class="el-icon-error" style="font-size: 15px;cursor: pointer;" v-show="warningGroupId"></em> <em slot="suffix" class="el-icon-bottom" style="font-size: 12px;" v-show="!warningGroupId"></em> </el-input> <el-option v-for="city in notifyGroupList" :key="city.id" :value="city.id" :label="city.code"> </el-option> </el-select> </div> </div> <div class="clearfix list"> <div class="text"> {{$t('Complement Data')}} </div> <div class="cont"> <div style="padding-top: 6px;"> <el-checkbox v-model="execType" size="small">{{$t('Whether it is a complement process?')}}</el-checkbox> </div> </div> </div> <template v-if="execType"> <div class="clearfix list" style="margin:-6px 0 16px 0"> <div class="text"> {{$t('Mode of execution')}} </div> <div class="cont"> <el-radio-group v-model="runMode" style="margin-top: 7px;"> <el-radio :label="'RUN_MODE_SERIAL'">{{$t('Serial execution')}}</el-radio> <el-radio :label="'RUN_MODE_PARALLEL'">{{$t('Parallel execution')}}</el-radio> </el-radio-group> </div> </div> <div class="clearfix list"> <div class="text"> {{$t('Schedule date')}} </div> <div class="cont"> <el-date-picker style="width: 360px" 
v-model="scheduleTime" size="small" @change="_datepicker" type="datetimerange" range-separator="-" :start-placeholder="$t('startDate')" :end-placeholder="$t('endDate')" value-format="yyyy-MM-dd HH:mm:ss"> </el-date-picker> </div> </div> </template> <div class="clearfix list"> <div class="text"> <span>{{$t('Startup parameter')}}</span> </div> <div class="cont" style="width: 688px;"> <div style="padding-top: 6px;"> <m-local-params ref="refLocalParams" @on-local-params="_onLocalParams" :udp-list="udpList" :hide="false"> </m-local-params> </div> </div> </div> <div class="submit"> <el-button type="text" size="small" @click="close()"> {{$t('Cancel')}} </el-button> <el-button type="primary" size="small" round :loading="spinnerLoading" @click="ok()">{{spinnerLoading ? 'Loading...' : $t('Start')}} </el-button> </div> </div> </template> <script> import _ from 'lodash' import dayjs from 'dayjs' import store from '@/conf/home/store' import { warningTypeList } from './util' import mPriority from '@/module/components/priority/priority' import mWorkerGroups from '@/conf/home/pages/dag/_source/formModel/_source/workerGroups' import mLocalParams from '@/conf/home/pages/dag/_source/formModel/tasks/_source/localParams' import disabledState from '@/module/mixin/disabledState' import { mapMutations } from 'vuex' export default { name: 'start-process', data () { return { store, processDefinitionId: 0, failureStrategy: 'CONTINUE', warningTypeList: warningTypeList, workflowName: '', warningType: '', notifyGroupList: [], warningGroupId: '', scheduleTime: '', spinnerLoading: false, execType: false, taskDependType: 'TASK_POST', runMode: 'RUN_MODE_SERIAL', processInstancePriority: 'MEDIUM', workerGroup: 'default', // Global custom parameters definitionGlobalParams: [], udpList: [] } }, mixins: [disabledState], props: { startData: Object, startNodeList: { type: String, default: '' }, sourceType: String }, methods: { ...mapMutations('dag', ['setIsDetails', 'resetParams']), _onLocalParams (a) { 
this.udpList = a }, _datepicker (val) { this.scheduleTime = val }, _start () { this.spinnerLoading = true let startParams = {} for (const item of this.udpList) { if (item.value !== '') { startParams[item.prop] = item.value } } let param = { processDefinitionId: this.startData.id, scheduleTime: this.scheduleTime.length && this.scheduleTime.join(',') || '', failureStrategy: this.failureStrategy, warningType: this.warningType, warningGroupId: this.warningGroupId === '' ? 0 : this.warningGroupId, execType: this.execType ? 'COMPLEMENT_DATA' : null, startNodeList: this.startNodeList, taskDependType: this.taskDependType, runMode: this.runMode, processInstancePriority: this.processInstancePriority, workerGroup: this.workerGroup, startParams: !_.isEmpty(startParams) ? JSON.stringify(startParams) : '' } // Executed from the specified node if (this.sourceType === 'contextmenu') { param.taskDependType = this.taskDependType } this.store.dispatch('dag/processStart', param).then(res => { this.$message.success(res.msg) this.$emit('onUpdateStart') // recovery this.udpList = _.cloneDeep(this.definitionGlobalParams) setTimeout(() => { this.spinnerLoading = false this.close() }, 500) }).catch(e => { this.$message.error(e.msg || '') this.spinnerLoading = false }) }, _getNotifyGroupList () { return new Promise((resolve, reject) => { this.store.dispatch('dag/getNotifyGroupList').then(res => { this.notifyGroupList = res resolve() }) }) }, _getGlobalParams () { this.store.dispatch('dag/getProcessDetails', this.startData.id).then(res => { this.definitionGlobalParams = _.cloneDeep(this.store.state.dag.globalParams) this.udpList = _.cloneDeep(this.store.state.dag.globalParams) }) }, ok () { this._start() }, close () { this.$emit('closeStart') } }, watch: { execType (a) { this.scheduleTime = a ? 
[dayjs().format('YYYY-MM-DD 00:00:00'), dayjs().format('YYYY-MM-DD 00:00:00')] : '' } }, created () { this.warningType = this.warningTypeList[0].id this.workflowName = this.startData.name this._getGlobalParams() let stateWorkerGroupsList = this.store.state.security.workerGroupsListAll || [] if (stateWorkerGroupsList.length) { this.workerGroup = stateWorkerGroupsList[0].id } else { this.store.dispatch('security/getWorkerGroupsAll').then(res => { this.$nextTick(() => { if (res.length > 0) { this.workerGroup = res[0].id } }) }) } }, mounted () { this._getNotifyGroupList().then(() => { this.$nextTick(() => { this.warningGroupId = '' }) }) this.workflowName = this.startData.name }, computed: {}, components: { mPriority, mWorkerGroups, mLocalParams } } </script> <style lang="scss" rel="stylesheet/scss"> .start-process-model { width: 860px; min-height: 300px; background: #fff; border-radius: 3px; .title-box { margin-bottom: 18px; span { padding-left: 30px; font-size: 16px; padding-top: 29px; display: block; } } .list { margin-bottom: 14px; .text { width: 140px; float: left; text-align: right; line-height: 32px; padding-right: 8px; } .cont { width: 350px; float: left; .add-email-model { padding: 20px; } } } .submit { text-align: right; padding-right: 30px; padding-top: 10px; padding-bottom: 30px; } } </style>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,385
[Improvement][recording data] Manually configure the parallelism of supplementary when recording data
*i met a situation that the memory fulls and the dolphinscheduler server down when i start the recording data(补数) tasks, if there is a feature that we can configure the parallelism of supplementary recording data manually * **Describe the question** The memory fulls and the dolphinscheduler server down when i start the recording data(补数) tasks. **What are the current deficiencies and the benefits of improvement** We can configure the parallelism of supplementary recording data manually so that the machine resource will not be occupied too much. **Which version of DolphinScheduler:** -[1.3.3-preview] **Describe alternatives you've considered** Changing the model to "Serial"(串行) but this model will take too much time.
https://github.com/apache/dolphinscheduler/issues/4385
https://github.com/apache/dolphinscheduler/pull/5912
2afa625a753680313e8a6c5fb3a68e01e56f5caa
1887bde1ebf8249de153890e78d449582e3eaf4a
"2021-01-06T02:58:47Z"
java
"2021-08-16T06:59:28Z"
dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ export default { 'User Name': 'User Name', 'Please enter user name': 'Please enter user name', Password: 'Password', 'Please enter your password': 'Please enter your password', 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22', Login: 'Login', Home: 'Home', 'Failed to create node to save': 'Failed to create node to save', 'Global parameters': 'Global parameters', 'Local parameters': 'Local parameters', 'Copy success': 'Copy success', 'The browser does not support automatic copying': 'The browser does not support automatic copying', 'Whether to save the DAG graph': 'Whether to save the DAG graph', 'Current node settings': 'Current node settings', 'View history': 'View history', 'View log': 'View log', 'Force success': 'Force success', 'Enter this child node': 'Enter this child node', 'Node name': 'Node name', 'Please enter name (required)': 'Please enter name (required)', 'Run flag': 'Run flag', Normal: 'Normal', 'Prohibition execution': 'Prohibition execution', 'Please enter description': 'Please enter description', 
'Number of failed retries': 'Number of failed retries', Times: 'Times', 'Failed retry interval': 'Failed retry interval', Minute: 'Minute', 'Delay execution time': 'Delay execution time', 'Delay execution': 'Delay execution', 'Forced success': 'Forced success', Cancel: 'Cancel', 'Confirm add': 'Confirm add', 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process', 'The task has not been executed and cannot enter the sub-Process': 'The task has not been executed and cannot enter the sub-Process', 'Name already exists': 'Name already exists', 'Download Log': 'Download Log', 'Refresh Log': 'Refresh Log', 'Enter full screen': 'Enter full screen', 'Cancel full screen': 'Cancel full screen', Close: 'Close', 'Update log success': 'Update log success', 'No more logs': 'No more logs', 'No log': 'No log', 'Loading Log...': 'Loading Log...', 'Set the DAG diagram name': 'Set the DAG diagram name', 'Please enter description(optional)': 'Please enter description(optional)', 'Set global': 'Set global', 'Whether to go online the process definition': 'Whether to go online the process definition', 'Whether to update the process definition': 'Whether to update the process definition', Add: 'Add', 'DAG graph name cannot be empty': 'DAG graph name cannot be empty', 'Create Datasource': 'Create Datasource', 'Project Home': 'Workflow Monitor', 'Project Manage': 'Project', 'Create Project': 'Create Project', 'Cron Manage': 'Cron Manage', 'Copy Workflow': 'Copy Workflow', 'Tenant Manage': 'Tenant Manage', 'Create Tenant': 'Create Tenant', 'User Manage': 'User Manage', 'Create User': 'Create User', 'User Information': 'User Information', 'Edit Password': 'Edit Password', Success: 'Success', Failed: 'Failed', Delete: 'Delete', 'Please choose': 'Please choose', 'Please enter a positive integer': 'Please enter a positive integer', 'Program Type': 'Program Type', 'Main 
Class': 'Main Class', 'Main Jar Package': 'Main Jar Package', 'Please enter main jar package': 'Please enter main jar package', 'Please enter main class': 'Please enter main class', 'Main Arguments': 'Main Arguments', 'Please enter main arguments': 'Please enter main arguments', 'Option Parameters': 'Option Parameters', 'Please enter option parameters': 'Please enter option parameters', Resources: 'Resources', 'Custom Parameters': 'Custom Parameters', 'Custom template': 'Custom template', Datasource: 'Datasource', methods: 'methods', 'Please enter the procedure method': 'Please enter the procedure script \n\ncall procedure:{call <procedure-name>[(<arg1>,<arg2>, ...)]}\n\ncall function:{?= call <procedure-name>[(<arg1>,<arg2>, ...)]} ', 'The procedure method script example': 'example:{call <procedure-name>[(?,?, ...)]} or {?= call <procedure-name>[(?,?, ...)]}', Script: 'Script', 'Please enter script(required)': 'Please enter script(required)', 'Deploy Mode': 'Deploy Mode', 'Driver Cores': 'Driver Cores', 'Please enter Driver cores': 'Please enter Driver cores', 'Driver Memory': 'Driver Memory', 'Please enter Driver memory': 'Please enter Driver memory', 'Executor Number': 'Executor Number', 'Please enter Executor number': 'Please enter Executor number', 'The Executor number should be a positive integer': 'The Executor number should be a positive integer', 'Executor Memory': 'Executor Memory', 'Please enter Executor memory': 'Please enter Executor memory', 'Executor Cores': 'Executor Cores', 'Please enter Executor cores': 'Please enter Executor cores', 'Memory should be a positive integer': 'Memory should be a positive integer', 'Core number should be positive integer': 'Core number should be positive integer', 'Flink Version': 'Flink Version', 'JobManager Memory': 'JobManager Memory', 'Please enter JobManager memory': 'Please enter JobManager memory', 'TaskManager Memory': 'TaskManager Memory', 'Please enter TaskManager memory': 'Please enter TaskManager memory', 
'Slot Number': 'Slot Number', 'Please enter Slot number': 'Please enter Slot number', Parallelism: 'Parallelism', 'Please enter Parallelism': 'Please enter Parallelism', 'TaskManager Number': 'TaskManager Number', 'Please enter TaskManager number': 'Please enter TaskManager number', 'App Name': 'App Name', 'Please enter app name(optional)': 'Please enter app name(optional)', 'SQL Type': 'SQL Type', 'Send Email': 'Send Email', 'Log display': 'Log display', 'Max Numbers Return': 'Number of records to return', 'Max Numbers Return placeholder': 'Default is 10000, a large value may cause high pressure on the memory', 'Max Numbers Return required': 'Number of records to return parameter must be a number in the range of 0 - 2147483647', 'rows of result': 'rows of result', Title: 'Title', 'Please enter the title of email': 'Please enter the title of email', Table: 'Table', TableMode: 'Table', Attachment: 'Attachment', 'SQL Parameter': 'SQL Parameter', 'SQL Statement': 'SQL Statement', 'UDF Function': 'UDF Function', 'Please enter a SQL Statement(required)': 'Please enter a SQL Statement(required)', 'Please enter a JSON Statement(required)': 'Please enter a JSON Statement(required)', 'One form or attachment must be selected': 'One form or attachment must be selected', 'Mail subject required': 'Mail subject required', 'Child Node': 'Child Node', 'Please select a sub-Process': 'Please select a sub-Process', Edit: 'Edit', 'Switch To This Version': 'Switch To This Version', 'Datasource Name': 'Datasource Name', 'Please enter datasource name': 'Please enter datasource name', IP: 'IP', 'Please enter IP': 'Please enter IP', Port: 'Port', 'Please enter port': 'Please enter port', 'Database Name': 'Database Name', 'Please enter database name': 'Please enter database name', 'Oracle Connect Type': 'ServiceName or SID', 'Oracle Service Name': 'ServiceName', 'Oracle SID': 'SID', 'jdbc connect parameters': 'jdbc connect parameters', 'Test Connect': 'Test Connect', 'Please enter resource 
name': 'Please enter resource name', 'Please enter resource folder name': 'Please enter resource folder name', 'Please enter a non-query SQL statement': 'Please enter a non-query SQL statement', 'Please enter IP/hostname': 'Please enter IP/hostname', 'jdbc connection parameters is not a correct JSON format': 'jdbc connection parameters is not a correct JSON format', '#': '#', 'Datasource Type': 'Datasource Type', 'Datasource Parameter': 'Datasource Parameter', 'Create Time': 'Create Time', 'Update Time': 'Update Time', Operation: 'Operation', 'Current Version': 'Current Version', 'Click to view': 'Click to view', 'Delete?': 'Delete?', 'Switch Version Successfully': 'Switch Version Successfully', 'Confirm Switch To This Version?': 'Confirm Switch To This Version?', Confirm: 'Confirm', 'Task status statistics': 'Task Status Statistics', Number: 'Number', State: 'State', 'Process Status Statistics': 'Process Status Statistics', 'Process Definition Statistics': 'Process Definition Statistics', 'Project Name': 'Project Name', 'Please enter name': 'Please enter name', 'Owned Users': 'Owned Users', 'Process Pid': 'Process Pid', 'Zk registration directory': 'Zk registration directory', cpuUsage: 'cpuUsage', memoryUsage: 'memoryUsage', 'Last heartbeat time': 'Last heartbeat time', 'Edit Tenant': 'Edit Tenant', 'OS Tenant Code': 'OS Tenant Code', 'Tenant Name': 'Tenant Name', Queue: 'Yarn Queue', 'Please select a queue': 'default is tenant association queue', 'Please enter the os tenant code in English': 'Please enter the os tenant code in English', 'Please enter os tenant code in English': 'Please enter os tenant code in English', 'Please enter os tenant code': 'Please enter os tenant code', 'Please enter tenant Name': 'Please enter tenant Name', 'The os tenant code. Only letters or a combination of letters and numbers are allowed': 'The os tenant code. 
Only letters or a combination of letters and numbers are allowed', 'Edit User': 'Edit User', Tenant: 'Tenant', Email: 'Email', Phone: 'Phone', 'User Type': 'User Type', 'Please enter phone number': 'Please enter phone number', 'Please enter email': 'Please enter email', 'Please enter the correct email format': 'Please enter the correct email format', 'Please enter the correct mobile phone format': 'Please enter the correct mobile phone format', Project: 'Project', Authorize: 'Authorize', 'File resources': 'File resources', 'UDF resources': 'UDF resources', 'UDF resources directory': 'UDF resources directory', 'Please select UDF resources directory': 'Please select UDF resources directory', 'Alarm group': 'Alarm group', 'Alarm group required': 'Alarm group required', 'Edit alarm group': 'Edit alarm group', 'Create alarm group': 'Create alarm group', 'Create Alarm Instance': 'Create Alarm Instance', 'Edit Alarm Instance': 'Edit Alarm Instance', 'Group Name': 'Group Name', 'Alarm instance name': 'Alarm instance name', 'Alarm plugin name': 'Alarm plugin name', 'Select plugin': 'Select plugin', 'Select Alarm plugin': 'Please select an Alarm plugin', 'Please enter group name': 'Please enter group name', 'Instance parameter exception': 'Instance parameter exception', 'Group Type': 'Group Type', 'Alarm plugin instance': 'Alarm plugin instance', 'Select Alarm plugin instance': 'Please select an Alarm plugin instance', Remarks: 'Remarks', SMS: 'SMS', 'Managing Users': 'Managing Users', Permission: 'Permission', Administrator: 'Administrator', 'Confirm Password': 'Confirm Password', 'Please enter confirm password': 'Please enter confirm password', 'Password cannot be in Chinese': 'Password cannot be in Chinese', 'Please enter a password (6-22) character password': 'Please enter a password (6-22) character password', 'Confirmation password cannot be in Chinese': 'Confirmation password cannot be in Chinese', 'Please enter a confirmation password (6-22) character password': 
'Please enter a confirmation password (6-22) character password', 'The password is inconsistent with the confirmation password': 'The password is inconsistent with the confirmation password', 'Please select the datasource': 'Please select the datasource', 'Please select resources': 'Please select resources', Query: 'Query', 'Non Query': 'Non Query', 'prop(required)': 'prop(required)', 'value(optional)': 'value(optional)', 'value(required)': 'value(required)', 'prop is empty': 'prop is empty', 'value is empty': 'value is empty', 'prop is repeat': 'prop is repeat', 'Start Time': 'Start Time', 'End Time': 'End Time', crontab: 'crontab', 'Failure Strategy': 'Failure Strategy', online: 'online', offline: 'offline', 'Task Status': 'Task Status', 'Process Instance': 'Process Instance', 'Task Instance': 'Task Instance', 'Select date range': 'Select date range', startDate: 'startDate', endDate: 'endDate', Date: 'Date', Waiting: 'Waiting', Execution: 'Execution', Finish: 'Finish', 'Create File': 'Create File', 'Create folder': 'Create folder', 'File Name': 'File Name', 'Folder Name': 'Folder Name', 'File Format': 'File Format', 'Folder Format': 'Folder Format', 'File Content': 'File Content', 'Upload File Size': 'Upload File size cannot exceed 1g', Create: 'Create', 'Please enter the resource content': 'Please enter the resource content', 'Resource content cannot exceed 3000 lines': 'Resource content cannot exceed 3000 lines', 'File Details': 'File Details', 'Download Details': 'Download Details', Return: 'Return', Save: 'Save', 'File Manage': 'File Manage', 'Upload Files': 'Upload Files', 'Create UDF Function': 'Create UDF Function', 'Upload UDF Resources': 'Upload UDF Resources', 'Service-Master': 'Service-Master', 'Service-Worker': 'Service-Worker', 'Process Name': 'Process Name', Executor: 'Executor', 'Run Type': 'Run Type', 'Scheduling Time': 'Scheduling Time', 'Run Times': 'Run Times', host: 'host', 'fault-tolerant sign': 'fault-tolerant sign', Rerun: 'Rerun', 
'Recovery Failed': 'Recovery Failed', Stop: 'Stop', Pause: 'Pause', 'Recovery Suspend': 'Recovery Suspend', Gantt: 'Gantt', 'Node Type': 'Node Type', 'Submit Time': 'Submit Time', Duration: 'Duration', 'Retry Count': 'Retry Count', 'Task Name': 'Task Name', 'Task Date': 'Task Date', 'Source Table': 'Source Table', 'Record Number': 'Record Number', 'Target Table': 'Target Table', 'Online viewing type is not supported': 'Online viewing type is not supported', Size: 'Size', Rename: 'Rename', Download: 'Download', Export: 'Export', 'Version Info': 'Version Info', Submit: 'Submit', 'Edit UDF Function': 'Edit UDF Function', type: 'type', 'UDF Function Name': 'UDF Function Name', FILE: 'FILE', UDF: 'UDF', 'File Subdirectory': 'File Subdirectory', 'Please enter a function name': 'Please enter a function name', 'Package Name': 'Package Name', 'Please enter a Package name': 'Please enter a Package name', Parameter: 'Parameter', 'Please enter a parameter': 'Please enter a parameter', 'UDF Resources': 'UDF Resources', 'Upload Resources': 'Upload Resources', Instructions: 'Instructions', 'Please enter a instructions': 'Please enter a instructions', 'Please enter a UDF function name': 'Please enter a UDF function name', 'Select UDF Resources': 'Select UDF Resources', 'Class Name': 'Class Name', 'Jar Package': 'Jar Package', 'Library Name': 'Library Name', 'UDF Resource Name': 'UDF Resource Name', 'File Size': 'File Size', Description: 'Description', 'Drag Nodes and Selected Items': 'Drag Nodes and Selected Items', 'Select Line Connection': 'Select Line Connection', 'Delete selected lines or nodes': 'Delete selected lines or nodes', 'Full Screen': 'Full Screen', Unpublished: 'Unpublished', 'Start Process': 'Start Process', 'Execute from the current node': 'Execute from the current node', 'Recover tolerance fault process': 'Recover tolerance fault process', 'Resume the suspension process': 'Resume the suspension process', 'Execute from the failed nodes': 'Execute from the failed 
nodes', 'Complement Data': 'Complement Data', 'Scheduling execution': 'Scheduling execution', 'Recovery waiting thread': 'Recovery waiting thread', 'Submitted successfully': 'Submitted successfully', Executing: 'Executing', 'Ready to pause': 'Ready to pause', 'Ready to stop': 'Ready to stop', 'Need fault tolerance': 'Need fault tolerance', Kill: 'Kill', 'Waiting for thread': 'Waiting for thread', 'Waiting for dependence': 'Waiting for dependence', Start: 'Start', Copy: 'Copy', 'Copy name': 'Copy name', 'Copy path': 'Copy path', 'Please enter keyword': 'Please enter keyword', 'File Upload': 'File Upload', 'Drag the file into the current upload window': 'Drag the file into the current upload window', 'Drag area upload': 'Drag area upload', Upload: 'Upload', 'ReUpload File': 'ReUpload File', 'Please enter file name': 'Please enter file name', 'Please select the file to upload': 'Please select the file to upload', 'Resources manage': 'Resources', Security: 'Security', Logout: 'Logout', 'No data': 'No data', 'Uploading...': 'Uploading...', 'Loading...': 'Loading...', List: 'List', 'Unable to download without proper url': 'Unable to download without proper url', Process: 'Process', 'Process definition': 'Process definition', 'Task record': 'Task record', 'Warning group manage': 'Warning group manage', 'Warning instance manage': 'Warning instance manage', 'Servers manage': 'Servers manage', 'UDF manage': 'UDF manage', 'Resource manage': 'Resource manage', 'Function manage': 'Function manage', 'Edit password': 'Edit password', 'Ordinary users': 'Ordinary users', 'Create process': 'Create process', 'Import process': 'Import process', 'Timing state': 'Timing state', Timing: 'Timing', Timezone: 'Timezone', TreeView: 'TreeView', 'Mailbox already exists! Recipients and copyers cannot repeat': 'Mailbox already exists! 
Recipients and copyers cannot repeat', 'Mailbox input is illegal': 'Mailbox input is illegal', 'Please set the parameters before starting': 'Please set the parameters before starting', Continue: 'Continue', End: 'End', 'Node execution': 'Node execution', 'Backward execution': 'Backward execution', 'Forward execution': 'Forward execution', 'Execute only the current node': 'Execute only the current node', 'Notification strategy': 'Notification strategy', 'Notification group': 'Notification group', 'Please select a notification group': 'Please select a notification group', receivers: 'receivers', receiverCcs: 'receiverCcs', 'Whether it is a complement process?': 'Whether it is a complement process?', 'Schedule date': 'Schedule date', 'Mode of execution': 'Mode of execution', 'Serial execution': 'Serial execution', 'Parallel execution': 'Parallel execution', 'Set parameters before timing': 'Set parameters before timing', 'Start and stop time': 'Start and stop time', 'Please select time': 'Please select time', 'Please enter crontab': 'Please enter crontab', none_1: 'none', success_1: 'success', failure_1: 'failure', All_1: 'All', Toolbar: 'Toolbar', 'View variables': 'View variables', 'Format DAG': 'Format DAG', 'Refresh DAG status': 'Refresh DAG status', Return_1: 'Return', 'Please enter format': 'Please enter format', 'connection parameter': 'connection parameter', 'Process definition details': 'Process definition details', 'Create process definition': 'Create process definition', 'Scheduled task list': 'Scheduled task list', 'Process instance details': 'Process instance details', 'Create Resource': 'Create Resource', 'User Center': 'User Center', AllStatus: 'All', None: 'None', Name: 'Name', 'Process priority': 'Process priority', 'Task priority': 'Task priority', 'Task timeout alarm': 'Task timeout alarm', 'Timeout strategy': 'Timeout strategy', 'Timeout alarm': 'Timeout alarm', 'Timeout failure': 'Timeout failure', 'Timeout period': 'Timeout period', 'Waiting 
Dependent complete': 'Waiting Dependent complete', 'Waiting Dependent start': 'Waiting Dependent start', 'Check interval': 'Check interval', 'Timeout must be longer than check interval': 'Timeout must be longer than check interval', 'Timeout strategy must be selected': 'Timeout strategy must be selected', 'Timeout must be a positive integer': 'Timeout must be a positive integer', 'Add dependency': 'Add dependency', and: 'and', or: 'or', month: 'month', week: 'week', day: 'day', hour: 'hour', Running: 'Running', 'Waiting for dependency to complete': 'Waiting for dependency to complete', Selected: 'Selected', CurrentHour: 'CurrentHour', Last1Hour: 'Last1Hour', Last2Hours: 'Last2Hours', Last3Hours: 'Last3Hours', Last24Hours: 'Last24Hours', today: 'today', Last1Days: 'Last1Days', Last2Days: 'Last2Days', Last3Days: 'Last3Days', Last7Days: 'Last7Days', ThisWeek: 'ThisWeek', LastWeek: 'LastWeek', LastMonday: 'LastMonday', LastTuesday: 'LastTuesday', LastWednesday: 'LastWednesday', LastThursday: 'LastThursday', LastFriday: 'LastFriday', LastSaturday: 'LastSaturday', LastSunday: 'LastSunday', ThisMonth: 'ThisMonth', LastMonth: 'LastMonth', LastMonthBegin: 'LastMonthBegin', LastMonthEnd: 'LastMonthEnd', 'Refresh status succeeded': 'Refresh status succeeded', 'Queue manage': 'Yarn Queue manage', 'Create queue': 'Create queue', 'Edit queue': 'Edit queue', 'Datasource manage': 'Datasource', 'History task record': 'History task record', 'Please go online': 'Please go online', 'Queue value': 'Queue value', 'Please enter queue value': 'Please enter queue value', 'Worker group manage': 'Worker group manage', 'Create worker group': 'Create worker group', 'Edit worker group': 'Edit worker group', 'Token manage': 'Token manage', 'Create token': 'Create token', 'Edit token': 'Edit token', Addresses: 'Addresses', 'Worker Addresses': 'Worker Addresses', 'Please select the worker addresses': 'Please select the worker addresses', 'Failure time': 'Failure time', 'Expiration time': 
'Expiration time', User: 'User', 'Please enter token': 'Please enter token', 'Generate token': 'Generate token', Monitor: 'Monitor', Group: 'Group', 'Queue statistics': 'Queue statistics', 'Command status statistics': 'Command status statistics', 'Task kill': 'Task Kill', 'Task queue': 'Task queue', 'Error command count': 'Error command count', 'Normal command count': 'Normal command count', Manage: ' Manage', 'Number of connections': 'Number of connections', Sent: 'Sent', Received: 'Received', 'Min latency': 'Min latency', 'Avg latency': 'Avg latency', 'Max latency': 'Max latency', 'Node count': 'Node count', 'Query time': 'Query time', 'Node self-test status': 'Node self-test status', 'Health status': 'Health status', 'Max connections': 'Max connections', 'Threads connections': 'Threads connections', 'Max used connections': 'Max used connections', 'Threads running connections': 'Threads running connections', 'Worker group': 'Worker group', 'Please enter a positive integer greater than 0': 'Please enter a positive integer greater than 0', 'Pre Statement': 'Pre Statement', 'Post Statement': 'Post Statement', 'Statement cannot be empty': 'Statement cannot be empty', 'Process Define Count': 'Work flow Define Count', 'Process Instance Running Count': 'Process Instance Running Count', 'command number of waiting for running': 'command number of waiting for running', 'failure command number': 'failure command number', 'tasks number of waiting running': 'tasks number of waiting running', 'task number of ready to kill': 'task number of ready to kill', 'Statistics manage': 'Statistics Manage', statistics: 'Statistics', 'select tenant': 'select tenant', 'Please enter Principal': 'Please enter Principal', 'Please enter the kerberos authentication parameter java.security.krb5.conf': 'Please enter the kerberos authentication parameter java.security.krb5.conf', 'Please enter the kerberos authentication parameter login.user.keytab.username': 'Please enter the kerberos 
authentication parameter login.user.keytab.username', 'Please enter the kerberos authentication parameter login.user.keytab.path': 'Please enter the kerberos authentication parameter login.user.keytab.path', 'The start time must not be the same as the end': 'The start time must not be the same as the end', 'Startup parameter': 'Startup parameter', 'Startup type': 'Startup type', 'warning of timeout': 'warning of timeout', 'Next five execution times': 'Next five execution times', 'Execute time': 'Execute time', 'Complement range': 'Complement range', 'Http Url': 'Http Url', 'Http Method': 'Http Method', 'Http Parameters': 'Http Parameters', 'Http Parameters Key': 'Http Parameters Key', 'Http Parameters Position': 'Http Parameters Position', 'Http Parameters Value': 'Http Parameters Value', 'Http Check Condition': 'Http Check Condition', 'Http Condition': 'Http Condition', 'Please Enter Http Url': 'Please Enter Http Url(required)', 'Please Enter Http Condition': 'Please Enter Http Condition', 'There is no data for this period of time': 'There is no data for this period of time', 'Worker addresses cannot be empty': 'Worker addresses cannot be empty', 'Please generate token': 'Please generate token', 'Please Select token': 'Please select the expiration time of token', 'Spark Version': 'Spark Version', TargetDataBase: 'target database', TargetTable: 'target table', 'Please enter the table of target': 'Please enter the table of target', 'Please enter a Target Table(required)': 'Please enter a Target Table(required)', SpeedByte: 'speed(byte count)', SpeedRecord: 'speed(record count)', '0 means unlimited by byte': '0 means unlimited', '0 means unlimited by count': '0 means unlimited', 'Modify User': 'Modify User', 'Whether directory': 'Whether directory', Yes: 'Yes', No: 'No', 'Hadoop Custom Params': 'Hadoop Params', 'Sqoop Advanced Parameters': 'Sqoop Params', 'Sqoop Job Name': 'Job Name', 'Please enter Mysql Database(required)': 'Please enter Mysql Database(required)', 
'Please enter Mysql Table(required)': 'Please enter Mysql Table(required)', 'Please enter Columns (Comma separated)': 'Please enter Columns (Comma separated)', 'Please enter Target Dir(required)': 'Please enter Target Dir(required)', 'Please enter Export Dir(required)': 'Please enter Export Dir(required)', 'Please enter Hive Database(required)': 'Please enter Hive Database(required)', 'Please enter Hive Table(required)': 'Please enter Hive Table(required)', 'Please enter Hive Partition Keys': 'Please enter Hive Partition Key', 'Please enter Hive Partition Values': 'Please enter Partition Value', 'Please enter Replace Delimiter': 'Please enter Replace Delimiter', 'Please enter Fields Terminated': 'Please enter Fields Terminated', 'Please enter Lines Terminated': 'Please enter Lines Terminated', 'Please enter Concurrency': 'Please enter Concurrency', 'Please enter Update Key': 'Please enter Update Key', 'Please enter Job Name(required)': 'Please enter Job Name(required)', 'Please enter Custom Shell(required)': 'Please enter Custom Shell(required)', Direct: 'Direct', Type: 'Type', ModelType: 'ModelType', ColumnType: 'ColumnType', Database: 'Database', Column: 'Column', 'Map Column Hive': 'Map Column Hive', 'Map Column Java': 'Map Column Java', 'Export Dir': 'Export Dir', 'Hive partition Keys': 'Hive partition Keys', 'Hive partition Values': 'Hive partition Values', FieldsTerminated: 'FieldsTerminated', LinesTerminated: 'LinesTerminated', IsUpdate: 'IsUpdate', UpdateKey: 'UpdateKey', UpdateMode: 'UpdateMode', 'Target Dir': 'Target Dir', DeleteTargetDir: 'DeleteTargetDir', FileType: 'FileType', CompressionCodec: 'CompressionCodec', CreateHiveTable: 'CreateHiveTable', DropDelimiter: 'DropDelimiter', OverWriteSrc: 'OverWriteSrc', ReplaceDelimiter: 'ReplaceDelimiter', Concurrency: 'Concurrency', Form: 'Form', OnlyUpdate: 'OnlyUpdate', AllowInsert: 'AllowInsert', 'Data Source': 'Data Source', 'Data Target': 'Data Target', 'All Columns': 'All Columns', 'Some Columns': 'Some 
Columns', 'Branch flow': 'Branch flow', 'Custom Job': 'Custom Job', 'Custom Script': 'Custom Script', 'Cannot select the same node for successful branch flow and failed branch flow': 'Cannot select the same node for successful branch flow and failed branch flow', 'Successful branch flow and failed branch flow are required': 'Successful and failed branch flows are required for the conditions node', 'No resources exist': 'No resources exist', 'Please delete all non-existing resources': 'Please delete all non-existing resources', 'Unauthorized or deleted resources': 'Unauthorized or deleted resources', 'Please delete all non-existent resources': 'Please delete all non-existent resources', Kinship: 'Workflow relationship', Reset: 'Reset', KinshipStateActive: 'Active', KinshipState1: 'Online', KinshipState0: 'Workflow is not online', KinshipState10: 'Scheduling is not online', 'Dag label display control': 'Dag label display control', Enable: 'Enable', Disable: 'Disable', 'The Worker group no longer exists, please select the correct Worker group!': 'The Worker group no longer exists, please select the correct Worker group!', 'Please confirm whether the workflow has been saved before downloading': 'Please confirm whether the workflow has been saved before downloading', 'User name length is between 3 and 39': 'User name length is between 3 and 39', 'Timeout Settings': 'Timeout Settings', 'Connect Timeout': 'Connect Timeout', 'Socket Timeout': 'Socket Timeout', 'Connect timeout be a positive integer': 'Connect timeout must be a positive integer', 'Socket Timeout be a positive integer': 'Socket timeout must be a positive integer', ms: 'ms', 'Please Enter Url': 'Please Enter Url eg. 
127.0.0.1:7077', Master: 'Master', 'Please select the waterdrop resources': 'Please select the waterdrop resources', zkDirectory: 'zkDirectory', 'Directory detail': 'Directory detail', 'Connection name': 'Connection name', 'Current connection settings': 'Current connection settings', 'Please save the DAG before formatting': 'Please save the DAG before formatting', 'Batch copy': 'Batch copy', 'Related items': 'Related items', 'Project name is required': 'Project name is required', 'Batch move': 'Batch move', Version: 'Version', 'Pre tasks': 'Pre tasks', 'Running Memory': 'Running Memory', 'Max Memory': 'Max Memory', 'Min Memory': 'Min Memory', 'The workflow canvas is abnormal and cannot be saved, please recreate': 'The workflow canvas is abnormal and cannot be saved, please recreate', Info: 'Info', 'Datasource userName': 'owner', 'Resource userName': 'owner' }