Dataset schema (each row pairs a closed issue with one file updated by the fixing pull request):

| Field | Type | Notes |
|---|---|---|
| status | stringclasses | 1 value |
| repo_name | stringclasses | 31 values |
| repo_url | stringclasses | 31 values |
| issue_id | int64 | 1 – 104k |
| title | stringlengths | 4 – 369 |
| body | stringlengths | 0 – 254k, nullable (⌀) |
| issue_url | stringlengths | 37 – 56 |
| pull_url | stringlengths | 37 – 54 |
| before_fix_sha | stringlengths | 40 |
| after_fix_sha | stringlengths | 40 |
| report_datetime | unknown | |
| language | stringclasses | 5 values |
| commit_datetime | unknown | |
| updated_file | stringlengths | 4 – 188 |
| file_content | stringlengths | 0 – 5.12M |
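For orientation, a minimal sketch of one row modeled as a Java record, assuming the Hugging Face-style schema above; the record name `BugFixRow` and the use of `Instant` for the two `unknown` datetime columns are our assumptions, not part of the dataset:

```java
import java.time.Instant;

// Hypothetical row model; field names follow the schema table above.
public record BugFixRow(
        String status,           // e.g. "closed"
        String repoName,         // e.g. "apache/dolphinscheduler"
        String repoUrl,
        long issueId,
        String title,
        String body,             // may be null (⌀ in the schema)
        String issueUrl,
        String pullUrl,
        String beforeFixSha,     // 40-char commit SHA before the fix
        String afterFixSha,      // 40-char commit SHA after the fix
        Instant reportDatetime,  // schema type is "unknown"; Instant is an assumption
        String language,
        Instant commitDatetime,  // schema type is "unknown"; Instant is an assumption
        String updatedFile,      // repo-relative path of the changed file
        String fileContent) {    // file content at before_fix_sha
}
```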
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,513 | [Feature][UI] max retry time and max retry delay dropdown box can be edited | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
![](https://files.catbox.moe/osb1ju.png)
Make the max retry times and max retry delay dropdown boxes editable, just like they were in 1.3.x.
### Use case
Users can set custom values for max retry times and max retry delay.
### Related issues
_No response_
### Are you willing to submit a PR?
- [x] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7513 | https://github.com/apache/dolphinscheduler/pull/8032 | 677482795c5d95f26a6b64fe5bf00372626d4a59 | e45e78d8031e64b8f408d712251c2c6c94c52f93 | "2021-12-21T03:22:31Z" | java | "2022-01-14T07:45:53Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/selectInput.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<el-select
style="width: 157px;"
:disabled="isDetails"
size="small"
@change="_onChange"
v-model="selectedValue"
filterable
allow-create>
<el-input
ref="input"
slot="trigger"
v-if="isInput"
:disabled="isDetails"
slot-scope="{ selectedModel }"
maxlength="4"
@blur="_onBlur"
:placeholder="$t('Please choose')"
:value="selectedModel === null ? '0' : selectedModel.value"
style="width: 100%;"
@click="_ckIcon">
<em slot="suffix" class="el-icon-error" style="font-size: 15px;cursor: pointer;" v-show="!isIconState"></em>
<em slot="suffix" class="el-icon-arrow-down" style="font-size: 12px;" v-show="isIconState"></em>
</el-input>
<el-option
v-for="city in list"
:key="city"
:value="city"
:label="city">
</el-option>
</el-select>
</template>
<script>
import _ from 'lodash'
import i18n from '@/module/i18n'
import disabledState from '@/module/mixin/disabledState'
export default {
name: 'form-select-input',
data () {
return {
selectedValue: this.value,
isIconState: false,
isInput: true
}
},
mixins: [disabledState],
props: {
value: String,
list: Array
},
model: {
prop: 'value',
event: 'valueEvent'
},
methods: {
_onChange (o) {
this.$emit('valueEvent', +o)
this._setIconState(+o)
},
_setIconState (value) {
// Whether the current value is one of the preset options (controls which suffix icon shows)
this.isIconState = _.includes(this.list, parseInt(value))
},
_ckIcon () {
if (this.isDetails) {
return
}
this.isInput = false
this.$emit('valueEvent', +this.list[0])
this.isIconState = true
// Refresh instance
setTimeout(() => {
this.isInput = true
}, 1)
},
_onBlur () {
let val = $(this.$refs.input.$el).find('input')[0].value
if (this._validation(val)) {
this.$emit('valueEvent', val)
this._setIconState(val)
}
},
_validation (val) {
if (val === '0') return true
if (!(/(^[0-9]*[1-9][0-9]*$)/.test(val))) {
this.$message.warning(`${i18n.$t('Please enter a positive integer')}`)
// init
this._ckIcon()
return false
}
return true
}
},
watch: {
value (val) {
this.selectedValue = val
}
},
created () {
this._setIconState(this.selectedValue)
},
mounted () {
},
components: {}
}
</script>
|
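For reference, the `_validation` method above accepts the literal string `'0'` or any unsigned positive integer and rejects everything else. Below is a standalone sketch of the same rule in Java, for illustration only; the class and method names are ours, not from the DolphinScheduler codebase:

```java
import java.util.regex.Pattern;

public final class RetryInputRule {
    // Mirrors the Vue component's check: "0", or a positive integer with no sign or decimals.
    private static final Pattern POSITIVE_INT = Pattern.compile("^[0-9]*[1-9][0-9]*$");

    static boolean isValidRetryValue(String val) {
        if ("0".equals(val)) {
            return true;
        }
        return val != null && POSITIVE_INT.matcher(val).matches();
    }

    public static void main(String[] args) {
        System.out.println(isValidRetryValue("0"));    // true (special-cased, as in the component)
        System.out.println(isValidRetryValue("15"));   // true
        System.out.println(isValidRetryValue("007"));  // true (leading zeros pass the regex)
        System.out.println(isValidRetryValue("-1"));   // false
        System.out.println(isValidRetryValue("1.5"));  // false
    }
}
```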
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,513 | [Feature][UI] max retry time and max retry delay dropdown box can be edited | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
![](https://files.catbox.moe/osb1ju.png)
Make the max retry times and max retry delay dropdown boxes editable, just like they were in 1.3.x.
### Use case
Users can set custom values for max retry times and max retry delay.
### Related issues
_No response_
### Are you willing to submit a PR?
- [x] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7513 | https://github.com/apache/dolphinscheduler/pull/8032 | 677482795c5d95f26a6b64fe5bf00372626d4a59 | e45e78d8031e64b8f408d712251c2c6c94c52f93 | "2021-12-21T03:22:31Z" | java | "2022-01-14T07:45:53Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="form-model-wrapper" v-clickoutside="_handleClose">
<div class="title-box">
<span class="name"
>{{ $t("Current node settings") }}
<a
v-if="helpUrlEnable(nodeData.taskType)"
class="helper-link"
target="_blank"
:href="helpUrl(nodeData.taskType)"
>
<i class="el-icon-question" />
{{ nodeData.taskType }} {{ $t("Instructions") }}</a
>
</span>
<span class="go-subtask">
<!-- The log viewer is embedded as a component here because it cannot be handled through a popup box -->
<m-log
v-if="type === 'instance' && taskInstance"
:item="backfillItem"
:task-instance-id="taskInstance.id"
>
<template slot="history"
><a href="javascript:" @click="_seeHistory"
><em class="ansicon el-icon-alarm-clock"></em
><em>{{ $t("View history") }}</em></a
></template
>
<template slot="log"
><a href="javascript:"
><em class="ansicon el-icon-document"></em
><em>{{ $t("View log") }}</em></a
></template
>
</m-log>
<a href="javascript:" @click="_goSubProcess" v-if="_isGoSubProcess"
><em class="ansicon ri-node-tree"></em
><em>{{ $t("Enter this child node") }}</em></a
>
</span>
</div>
<div class="content-box" v-if="isContentBox">
<div class="form-model">
<!-- Reference from task -->
<!-- <reference-from-task :taskType="nodeData.taskType" /> -->
<!-- Node name -->
<m-list-box>
<div slot="text">{{ $t("Node name") }}</div>
<div slot="content">
<el-input
type="text"
v-model="name"
size="small"
:disabled="isDetails"
:placeholder="$t('Please enter name (required)')"
maxlength="100"
id="inputNodeName"
>
</el-input>
</div>
</m-list-box>
<m-list-box v-if="fromTaskDefinition">
<div slot="text">{{ $t("Task Type") }}</div>
<div slot="content">
<el-select
@change="changeTaskType"
:value="nodeData.taskType"
:placeholder="$t('Please select a task type (required)')"
size="small"
style="width: 100%"
:disabled="isDetails"
>
<el-option
v-for="type in tasksTypeList"
:key="type"
:label="type"
:value="type"
:disabled="type === 'CONDITIONS' || type === 'SWITCH'"
>
</el-option>
</el-select>
</div>
</m-list-box>
<m-list-box v-if="fromTaskDefinition">
<div slot="text">{{ $t("Process Name") }}</div>
<div slot="content">
<el-select
@change="changeProcessCode"
:value="processCode"
size="small"
style="width: 100%"
:disabled="isDetails || taskDefinition"
>
<el-option
v-for="process in processListS"
:key="process.code"
:label="process.name"
:value="process.code"
/>
</el-select>
</div>
</m-list-box>
<!-- Running sign -->
<m-list-box>
<div slot="text">{{ $t("Run flag") }}</div>
<div slot="content">
<el-radio-group v-model="runFlag" size="small">
<el-radio :label="'YES'" :disabled="isDetails">{{
$t("Normal")
}}</el-radio>
<el-radio :label="'NO'" :disabled="isDetails">{{
$t("Prohibition execution")
}}</el-radio>
</el-radio-group>
</div>
</m-list-box>
<!-- description -->
<m-list-box>
<div slot="text">{{ $t("Description") }}</div>
<div slot="content">
<el-input
:rows="2"
type="textarea"
:disabled="isDetails"
v-model="desc"
:placeholder="$t('Please enter description')"
>
</el-input>
</div>
</m-list-box>
<!-- Task priority -->
<m-list-box>
<div slot="text">{{ $t("Task priority") }}</div>
<div slot="content">
<span class="label-box" style="width: 193px; display: inline-block">
<m-priority v-model="taskInstancePriority"></m-priority>
</span>
</div>
</m-list-box>
<!-- Worker group and environment -->
<m-list-box>
<div slot="text">{{ $t("Worker group") }}</div>
<div slot="content">
<span class="label-box" style="width: 193px; display: inline-block">
<m-worker-groups v-model="workerGroup"></m-worker-groups>
</span>
<span class="text-b">{{ $t("Environment Name") }}</span>
<m-related-environment
v-model="environmentCode"
:workerGroup="workerGroup"
:isNewCreate="isNewCreate"
v-on:environmentCodeEvent="_onUpdateEnvironmentCode"
></m-related-environment>
</div>
</m-list-box>
<!-- Worker group and environment -->
<m-list-box>
<div slot="text">{{ $t("Task group name") }}</div>
<div slot="content">
<span class="label-box" style="width: 193px; display: inline-block">
<m-task-groups
v-model="taskGroupId"
:project-code="this.projectCode"
v-on:taskGroupIdEvent="_onUpdateTaskGroupId"
></m-task-groups>
</span>
<span class="text-b">{{ $t("Task group queue priority") }}</span>
<el-input
:disabled="taskGroupId === ''"
style="width: 166px"
type="input"
v-model="taskGroupPriority"
maxlength="60"
v-on:input="_onUpdateTaskGroupPriority"
size="small"
>
</el-input>
</div>
</m-list-box>
<!-- Number of failed retries -->
<m-list-box v-if="nodeData.taskType !== 'SUB_PROCESS'">
<div slot="text">{{ $t("Number of failed retries") }}</div>
<div slot="content">
<m-select-input
v-model="maxRetryTimes"
:list="[0, 1, 2, 3, 4]"
></m-select-input>
<span>({{ $t("Times") }})</span>
<span class="text-b">{{ $t("Failed retry interval") }}</span>
<m-select-input
v-model="retryInterval"
:list="[1, 10, 30, 60, 120]"
></m-select-input>
<span>({{ $t("Minute") }})</span>
</div>
</m-list-box>
<!-- Delay execution time -->
<m-list-box
v-if="
nodeData.taskType !== 'SUB_PROCESS' &&
nodeData.taskType !== 'CONDITIONS' &&
nodeData.taskType !== 'DEPENDENT' &&
nodeData.taskType !== 'SWITCH'
"
>
<div slot="text">{{ $t("Delay execution time") }}</div>
<div slot="content">
<m-select-input
v-model="delayTime"
:list="[0, 1, 5, 10]"
></m-select-input>
<span>({{ $t("Minute") }})</span>
</div>
</m-list-box>
<!-- Branch flow -->
<m-list-box v-if="nodeData.taskType === 'CONDITIONS'">
<div slot="text">{{ $t("State") }}</div>
<div slot="content">
<span class="label-box" style="width: 193px; display: inline-block">
<el-select
style="width: 157px"
size="small"
v-model="successNode"
:disabled="true"
>
<el-option
v-for="item in stateList"
:key="item.value"
:value="item.value"
:label="item.label"
></el-option>
</el-select>
</span>
<span class="text-b" style="padding-left: 38px">{{
$t("Branch flow")
}}</span>
<el-select
style="width: 157px"
size="small"
v-model="successBranch"
clearable
:disabled="isDetails"
>
<el-option
v-for="item in postTasks"
:key="item.code"
:value="item.code"
:label="item.name"
></el-option>
</el-select>
</div>
</m-list-box>
<m-list-box v-if="nodeData.taskType === 'CONDITIONS'">
<div slot="text">{{ $t("State") }}</div>
<div slot="content">
<span class="label-box" style="width: 193px; display: inline-block">
<el-select
style="width: 157px"
size="small"
v-model="failedNode"
:disabled="true"
>
<el-option
v-for="item in stateList"
:key="item.value"
:value="item.value"
:label="item.label"
></el-option>
</el-select>
</span>
<span class="text-b" style="padding-left: 38px">{{
$t("Branch flow")
}}</span>
<el-select
style="width: 157px"
size="small"
v-model="failedBranch"
clearable
:disabled="isDetails"
>
<el-option
v-for="item in postTasks"
:key="item.code"
:value="item.code"
:label="item.name"
></el-option>
</el-select>
</div>
</m-list-box>
<div v-if="backfillRefresh">
<!-- Task timeout alarm -->
<m-timeout-alarm
v-if="nodeData.taskType !== 'DEPENDENT'"
ref="timeout"
:backfill-item="backfillItem"
@on-timeout="_onTimeout"
>
</m-timeout-alarm>
<!-- Dependent timeout alarm -->
<m-dependent-timeout
v-if="nodeData.taskType === 'DEPENDENT'"
ref="dependentTimeout"
:backfill-item="backfillItem"
@on-timeout="_onDependentTimeout"
>
</m-dependent-timeout>
<!-- shell node -->
<m-shell
v-if="nodeData.taskType === 'SHELL'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="SHELL"
:backfill-item="backfillItem"
>
</m-shell>
<!-- sub_process node -->
<m-sub-process
v-if="nodeData.taskType === 'SUB_PROCESS'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
@on-set-process-name="_onSetProcessName"
ref="SUB_PROCESS"
:backfill-item="backfillItem"
>
</m-sub-process>
<!-- procedure node -->
<m-procedure
v-if="nodeData.taskType === 'PROCEDURE'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="PROCEDURE"
:backfill-item="backfillItem"
>
</m-procedure>
<!-- sql node -->
<m-sql
v-if="nodeData.taskType === 'SQL'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="SQL"
:create-node-id="nodeData.id"
:backfill-item="backfillItem"
>
</m-sql>
<!-- spark node -->
<m-spark
v-if="nodeData.taskType === 'SPARK'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="SPARK"
:backfill-item="backfillItem"
>
</m-spark>
<m-flink
v-if="nodeData.taskType === 'FLINK'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="FLINK"
:backfill-item="backfillItem"
>
</m-flink>
<!-- mr node -->
<m-mr
v-if="nodeData.taskType === 'MR'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="MR"
:backfill-item="backfillItem"
>
</m-mr>
<!-- python node -->
<m-python
v-if="nodeData.taskType === 'PYTHON'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="PYTHON"
:backfill-item="backfillItem"
>
</m-python>
<!-- dependent node -->
<m-dependent
v-if="nodeData.taskType === 'DEPENDENT'"
@on-dependent="_onDependent"
@on-cache-dependent="_onCacheDependent"
ref="DEPENDENT"
:backfill-item="backfillItem"
>
</m-dependent>
<m-http
v-if="nodeData.taskType === 'HTTP'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="HTTP"
:backfill-item="backfillItem"
>
</m-http>
<m-datax
v-if="nodeData.taskType === 'DATAX'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="DATAX"
:backfill-item="backfillItem"
>
</m-datax>
<m-pigeon
v-if="nodeData.taskType === 'PIGEON'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
:backfill-item="backfillItem"
ref="PIGEON"
>
</m-pigeon>
<m-sqoop
v-if="nodeData.taskType === 'SQOOP'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="SQOOP"
:backfill-item="backfillItem"
>
</m-sqoop>
<m-conditions
v-if="nodeData.taskType === 'CONDITIONS'"
ref="CONDITIONS"
@on-dependent="_onDependent"
@on-cache-dependent="_onCacheDependent"
:backfill-item="backfillItem"
:prev-tasks="prevTasks"
>
</m-conditions>
<m-switch
v-if="nodeData.taskType === 'SWITCH'"
ref="SWITCH"
@on-switch-result="_onSwitchResult"
:backfill-item="backfillItem"
:nodeData="nodeData"
:postTasks="postTasks"
></m-switch>
<!-- seatunnel node -->
<m-seatunnel
v-if="nodeData.taskType === 'SEATUNNEL'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="SEATUNNEL"
:backfill-item="backfillItem"
>
</m-seatunnel>
</div>
<!-- Pre-tasks in workflow -->
<m-pre-tasks ref="preTasks" :code="code" :fromTaskDefinition="fromTaskDefinition" :prevTasks="prevTasks" :processDefinition="processDefinition"/>
</div>
</div>
<div class="bottom-box">
<div class="submit" style="background: #fff">
<el-button type="text" size="small" id="cancelBtn">
{{ $t("Cancel") }}
</el-button>
<el-button
type="primary"
size="small"
round
:loading="spinnerLoading"
@click="ok()"
:disabled="isDetails"
id="btnSubmit"
>{{ spinnerLoading ? $t("Loading...") : $t("Confirm") }}
</el-button>
</div>
</div>
</div>
</template>
<script>
import _ from 'lodash'
import { mapActions, mapState } from 'vuex'
import mLog from './log'
import mMr from './tasks/mr'
import mSql from './tasks/sql'
import i18n from '@/module/i18n'
import { findLocale } from '@/module/i18n/config'
import mListBox from './tasks/_source/listBox'
import mShell from './tasks/shell'
import mSeatunnel from './tasks/seatunnel'
import mSpark from './tasks/spark'
import mFlink from './tasks/flink'
import mPython from './tasks/python'
import mProcedure from './tasks/procedure'
import mDependent from './tasks/dependent'
import mHttp from './tasks/http'
import mDatax from './tasks/datax'
import mPigeon from './tasks/pigeon'
import mConditions from './tasks/conditions'
import mSwitch from './tasks/switch.vue'
import mSqoop from './tasks/sqoop'
import mSubProcess from './tasks/sub_process'
import mSelectInput from './_source/selectInput'
import mTimeoutAlarm from './_source/timeoutAlarm'
import mDependentTimeout from './_source/dependentTimeout'
import mWorkerGroups from './_source/workerGroups'
import mRelatedEnvironment from './_source/relatedEnvironment'
import mTaskGroups from './_source/taskGroups'
import mPreTasks from './tasks/pre_tasks'
import clickoutside from '@/module/util/clickoutside'
import disabledState from '@/module/mixin/disabledState'
import mPriority from '@/module/components/priority/priority'
import { findComponentDownward } from '@/module/util/'
import { tasksType } from '@/conf/home/pages/dag/_source/config.js'
// import ReferenceFromTask from './_source/referenceFromTask.vue'
export default {
name: 'form-model',
data () {
return {
// loading
spinnerLoading: false,
// node name
name: '',
// description
desc: '',
// Node echo data
backfillItem: {},
cacheBackfillItem: {},
// Resource(list)
resourcesList: [],
successNode: 'success',
failedNode: 'failed',
successBranch: '',
failedBranch: '',
conditionResult: {
successNode: [],
failedNode: []
},
switchResult: {},
// dependence
dependence: {},
// cache dependence
cacheDependence: {},
// task code
code: 0,
// Current node params data
params: {},
// Running sign
runFlag: 'YES',
// Controls when the content box renders; kept false until backfill completes, to avoid a second echo of the node data
isContentBox: false,
// Number of failed retries
maxRetryTimes: '0',
// Failure retry interval
retryInterval: '1',
// Delay execution time
delayTime: '0',
// Task timeout alarm
timeout: {},
// (For Dependent nodes) Wait start timeout alarm
waitStartTimeout: {},
// Task priority
taskInstancePriority: 'MEDIUM',
// worker group id
workerGroup: 'default',
// selected environment
environmentCode: '',
selectedWorkerGroup: '',
taskGroupId: '',
taskGroupPriority: 0,
stateList: [
{
value: 'success',
label: `${i18n.$t('Success')}`
},
{
value: 'failed',
label: `${i18n.$t('Failed')}`
}
],
// for CONDITIONS and SWITCH
postTasks: [],
prevTasks: [],
// refresh part of the formModel, after set backfillItem outside
backfillRefresh: true,
// whether this is a new Task
isNewCreate: true,
tasksTypeList: Object.keys(tasksType),
// processCode
processCode: undefined,
processDefinition: null
}
},
provide () {
return {
formModel: this
}
},
/**
* Directive to handle click events that originate outside the component
*/
directives: { clickoutside },
mixins: [disabledState],
props: {
nodeData: Object,
type: {
type: String,
default: ''
},
taskDefinition: Object,
projectCode: {
type: Number
}
},
inject: ['dagChart'],
methods: {
...mapActions('dag', ['getTaskInstanceList', 'getProcessDefinition']),
helpUrlEnable (typekey) {
const type = tasksType[typekey]
if (!type) return false
if (!type.helperLinkDisable) return true
return !type.helperLinkDisable
},
helpUrl (tasktype) {
return 'https://dolphinscheduler.apache.org/' +
findLocale(i18n.globalScope.LOCALE).helperContext +
'/docs/latest/user_doc/guide/task/' + tasktype.toLowerCase() + '.html'
},
taskToBackfillItem (task) {
return {
code: task.code,
conditionResult: task.taskParams.conditionResult,
switchResult: task.taskParams.switchResult,
delayTime: task.delayTime,
dependence: task.taskParams.dependence,
desc: task.description,
id: task.id,
maxRetryTimes: task.failRetryTimes,
name: task.name,
params: _.omit(task.taskParams, [
'conditionResult',
'dependence',
'waitStartTimeout',
'switchResult'
]),
retryInterval: task.failRetryInterval,
runFlag: task.flag,
taskInstancePriority: task.taskPriority,
timeout: {
interval: task.timeout,
strategy: task.timeoutNotifyStrategy,
enable: task.timeoutFlag === 'OPEN'
},
type: task.taskType,
waitStartTimeout: task.taskParams.waitStartTimeout,
workerGroup: task.workerGroup,
environmentCode: task.environmentCode,
taskGroupId: task.taskGroupId,
taskGroupPriority: task.taskGroupPriority
}
},
/**
* depend
*/
_onDependent (o) {
this.dependence = Object.assign(this.dependence, {}, o)
},
_onSwitchResult (o) {
this.switchResult = o
},
/**
* cache dependent
*/
_onCacheDependent (o) {
this.cacheDependence = Object.assign(this.cacheDependence, {}, o)
},
/**
* Task timeout alarm
*/
_onTimeout (o) {
this.timeout = Object.assign(this.timeout, {}, o)
},
/**
* Dependent timeout alarm
*/
_onDependentTimeout (o) {
this.timeout = Object.assign(this.timeout, {}, o.waitCompleteTimeout)
this.waitStartTimeout = Object.assign(
this.waitStartTimeout,
{},
o.waitStartTimeout
)
},
/**
* Click external to close the current component
*/
_handleClose () {
// this.close()
},
/**
* Jump to task instance
*/
_seeHistory () {
this.$emit('seeHistory', this.backfillItem.name)
},
/**
* Enter the child node, distinguishing between a process instance and a process definition
* @param type = instance
*/
_goSubProcess () {
if (_.isEmpty(this.backfillItem)) {
this.$message.warning(
`${i18n.$t(
'The newly created sub-Process has not yet been executed and cannot enter the sub-Process'
)}`
)
return
}
if (this.router.history.current.name === 'projects-instance-details') {
if (!this.taskInstance) {
this.$message.warning(
`${i18n.$t(
'The task has not been executed and cannot enter the sub-Process'
)}`
)
return
}
this.store
.dispatch('dag/getSubProcessId', { taskId: this.taskInstance.id })
.then((res) => {
this.$emit('onSubProcess', {
subInstanceId: res.data.subProcessInstanceId,
fromThis: this
})
})
.catch((e) => {
this.$message.error(e.msg || '')
})
} else {
const processDefinitionId =
this.backfillItem.params.processDefinitionId
const process = this.processListS.find(
(def) => def.id === processDefinitionId
)
this.$emit('onSubProcess', {
subProcessCode: process.code,
fromThis: this
})
}
},
_onUpdateWorkerGroup (o) {
this.selectedWorkerGroup = o
},
/**
* return params
*/
_onParams (o) {
this.params = Object.assign({}, o)
},
_onUpdateEnvironmentCode (o) {
this.environmentCode = o
},
_onUpdateTaskGroupId (o) {
this.taskGroupId = o
if (this.taskGroupId === '') {
this.taskGroupPriority = 0
}
},
_onUpdateTaskGroupPriority (o) {
this.taskGroupPriority = o
},
/**
* _onCacheParams is reserved
*/
_onCacheParams (o) {
this.params = Object.assign(this.params, {}, o)
},
/**
* verification name
*/
_verifName () {
if (!_.trim(this.name)) {
this.$message.warning(`${i18n.$t('Please enter name (required)')}`)
return false
}
if (
this.successBranch &&
this.successBranch === this.failedBranch
) {
this.$message.warning(
`${i18n.$t(
'Cannot select the same node for successful branch flow and failed branch flow'
)}`
)
return false
}
return true
},
_verifWorkGroup () {
let item = this.store.state.security.workerGroupsListAll.find((item) => {
return item.id === this.workerGroup
})
if (item === undefined) {
this.$message.warning(
`${i18n.$t(
'The Worker group no longer exists, please select the correct Worker group!'
)}`
)
return false
}
return true
},
_verifTaskType () {
if (!this.fromTaskDefinition) return true
if (!this.nodeData.taskType) {
this.$message.warning(
`${i18n.$t('Please select a task type (required)')}`
)
return false
}
return true
},
/**
* Global verification procedure
*/
_verification () {
// Verify name
if (!this._verifName()) {
return
}
// Verify task type
if (!this._verifTaskType()) {
return
}
// verif workGroup
if (!this._verifWorkGroup()) {
return
}
// Verify task alarm parameters
if (this.nodeData.taskType === 'DEPENDENT') {
if (!this.$refs.dependentTimeout._verification()) {
return
}
} else {
if (!this.$refs.timeout._verification()) {
return
}
}
// Verify node parameters
if (!this.$refs[this.nodeData.taskType]._verification()) {
return
}
// set dag preTask
if (this.dagChart && this.$refs.preTasks) {
this.$refs.preTasks.setDagPreNodes()
}
// set edge label
if (this.dagChart) {
this._setEdgeLabel()
}
this.successBranch && (this.conditionResult.successNode[0] = this.successBranch)
this.failedBranch && (this.conditionResult.failedNode[0] = this.failedBranch)
this.$emit('addTaskInfo', {
item: {
code: this.nodeData.id,
name: this.name,
description: this.desc,
taskType: this.nodeData.taskType,
taskParams: {
...this.params,
dependence: this.cacheDependence,
conditionResult: this.conditionResult,
waitStartTimeout: this.waitStartTimeout,
switchResult: this.switchResult
},
flag: this.runFlag,
taskPriority: this.taskInstancePriority,
workerGroup: this.workerGroup,
failRetryTimes: this.maxRetryTimes,
failRetryInterval: this.retryInterval,
timeoutFlag: this.timeout.enable ? 'OPEN' : 'CLOSE',
timeoutNotifyStrategy: this.timeout.strategy,
timeout: this.timeout.interval || 0,
delayTime: this.delayTime,
environmentCode: this.environmentCode || -1,
taskGroupId: this.taskGroupId,
taskGroupPriority: this.taskGroupPriority
},
fromThis: this,
...(this.fromTaskDefinition ? {
prevTasks: this.$refs.preTasks ? this.$refs.preTasks.preTasks : [],
processCode: this.processCode
} : {})
})
},
/**
* Echo the selected sub-workflow's name as the node name
*/
_onSetProcessName (name) {
this.name = name
},
/**
* set edge label by successBranch && failedBranch
*/
_setEdgeLabel () {
if (this.successBranch || this.failedBranch) {
const canvas = findComponentDownward(this.dagChart, 'dag-canvas')
const edges = canvas.getEdges()
const successTask = this.postTasks.find(
(t) => t.name === this.successBranch
)
const failedTask = this.postTasks.find(
(t) => t.name === this.failedBranch
)
const sEdge = edges.find(
(edge) =>
successTask &&
edge.sourceId === this.code &&
edge.targetId === successTask.code
)
const fEdge = edges.find(
(edge) =>
failedTask &&
edge.sourceId === this.code &&
edge.targetId === failedTask.code
)
sEdge && canvas.setEdgeLabel(sEdge.id, this.$t('Success'))
fEdge && canvas.setEdgeLabel(fEdge.id, this.$t('Failed'))
}
},
/**
* Submit verification
*/
ok () {
this._verification()
},
/**
* Close and destroy component and component internal events
*/
close () {
let flag = false
// A node that was never saved should be deleted on close
if (!this.backfillItem.name) {
flag = true
}
this.isContentBox = false
// flag: whether the parent should delete the (unsaved) node
this.$emit('close', {
item: this.cacheBackfillItem,
flag: flag,
fromThis: this
})
},
backfill (backfillItem) {
const o = backfillItem
// Non-null objects represent backfill
if (!_.isEmpty(o)) {
this.code = o.code
this.name = o.name
this.taskInstancePriority = o.taskInstancePriority
this.runFlag = o.runFlag || 'YES'
this.desc = o.desc
this.maxRetryTimes = o.maxRetryTimes
this.retryInterval = o.retryInterval
this.delayTime = o.delayTime
if (o.conditionResult) {
this.successBranch = o.conditionResult.successNode[0]
this.failedBranch = o.conditionResult.failedNode[0]
}
if (o.switchResult) {
this.switchResult = o.switchResult
}
// If the workergroup has been deleted, set the default workergroup
for (
let i = 0;
i < this.store.state.security.workerGroupsListAll.length;
i++
) {
let workerGroup = this.store.state.security.workerGroupsListAll[i].id
if (o.workerGroup === workerGroup) {
break
}
}
if (o.workerGroup === undefined) {
this.store
.dispatch('dag/getTaskInstanceList', {
pageSize: 10,
pageNo: 1,
processInstanceId: this.nodeData.instanceId,
name: o.name
})
.then((res) => {
this.workerGroup = res.totalList[0].workerGroup
})
} else {
this.workerGroup = o.workerGroup
}
this.environmentCode =
o.environmentCode === -1 ? '' : o.environmentCode
this.params = o.params || {}
this.dependence = o.dependence || {}
this.cacheDependence = o.dependence || {}
this.taskGroupId = o.taskGroupId
this.taskGroupPriority = o.taskGroupPriority
} else {
this.workerGroup = this.store.state.security.workerGroupsListAll[0].id
}
this.cacheBackfillItem = JSON.parse(JSON.stringify(o))
this.isContentBox = true
},
changeTaskType (value) {
this.$emit('changeTaskType', value)
},
calculateRelatedTasks () {
if (this.processDefinition && this.taskDefinition) {
const relations = this.processDefinition.processTaskRelationList || []
const tasks = this.processDefinition.taskDefinitionList || []
const tasksMap = {}
tasks.forEach(task => {
tasksMap[task.code] = task
})
const taskCode = this.taskDefinition.code
const buildTask = (task) => ({
code: task.code,
name: task.name,
type: task.taskType
})
// Downstream tasks
const postTasks = relations
.filter(relation => relation.preTaskCode === taskCode)
.map(relation => buildTask(tasksMap[relation.postTaskCode]))
// Upstream tasks
const prevTasks = relations
.filter(relation => relation.postTaskCode === taskCode && relation.preTaskCode !== 0)
.map(relation => buildTask(tasksMap[relation.preTaskCode]))
this.postTasks = postTasks
this.prevTasks = prevTasks
}
},
getProcessDetails () {
this.getProcessDefinition(this.processCode).then(res => {
this.processDefinition = res
this.calculateRelatedTasks()
})
},
changeProcessCode (code) {
this.processCode = code
this.getProcessDetails()
}
},
created () {
// Backfill data
let o = {}
this.code = this.nodeData.id
if (this.fromTaskDefinition) {
if (this.taskDefinition) {
const backfillItem = this.taskToBackfillItem(this.taskDefinition)
o = backfillItem
this.backfillItem = backfillItem
this.isNewCreate = false
}
} else {
let taskList = this.store.state.dag.tasks
if (taskList.length) {
taskList.forEach((task) => {
if (task.code === this.nodeData.id) {
const backfillItem = this.taskToBackfillItem(task)
o = backfillItem
this.backfillItem = backfillItem
this.isNewCreate = false
}
})
}
}
this.backfill(o)
if (this.dagChart) {
const canvas = findComponentDownward(this.dagChart, 'dag-canvas')
const postNodes = canvas.getPostNodes(this.code)
const prevNodes = canvas.getPrevNodes(this.code)
const buildTask = (node) => ({
code: node.id,
name: node.data.taskName,
type: node.data.taskType
})
this.postTasks = postNodes.map(buildTask)
this.prevTasks = prevNodes.map(buildTask)
}
if (this.fromTaskDefinition && this.taskDefinition) {
this.processCode = this.taskDefinition.processCode
this.getProcessDetails()
}
},
mounted () {
let self = this
$('#cancelBtn').mousedown(function (event) {
event.preventDefault()
self.close()
})
},
updated () {},
beforeDestroy () {},
destroyed () {},
computed: {
...mapState('dag', ['processListS', 'taskInstances']),
/**
* Child workflow entry show/hide
*/
_isGoSubProcess () {
return this.nodeData.taskType === 'SUB_PROCESS' && this.name && this.processListS && this.processListS.length > 0
},
taskInstance () {
if (this.taskInstances.length > 0) {
return this.taskInstances.find(
(instance) => instance.taskCode === this.nodeData.id
)
}
return null
},
/**
* Open the modal from task definition
*/
fromTaskDefinition () {
return this.type === 'task-definition'
}
},
components: {
mListBox,
mMr,
mShell,
mSeatunnel,
mSubProcess,
mProcedure,
mSql,
mLog,
mSpark,
mFlink,
mPython,
mDependent,
mHttp,
mDatax,
mPigeon,
mSqoop,
mConditions,
mSwitch,
mSelectInput,
mTimeoutAlarm,
mDependentTimeout,
mPriority,
mWorkerGroups,
mRelatedEnvironment,
mPreTasks,
mTaskGroups
// ReferenceFromTask
}
}
</script>
<style lang="scss" rel="stylesheet/scss">
@import "./formModel";
.ans-radio-disabled {
.ans-radio-inner:after {
background-color: #6f8391;
}
}
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 6,820 | [Feature][API] Ordinary users can also share resource data source projects with others | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Ordinary (non-admin) users should also be able to share resources, data sources, and projects with other users.
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
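In the `DataSourceServiceImpl` below, the sharing endpoints `unauthDatasource` and `authedDatasource` are gated by `isAdmin(loginUser)`, which is exactly what this request asks to relax. The following is only a hedged sketch of one plausible shape for an owner-or-admin check; it is not necessarily what the linked PR implemented, and the `User`/`UserType` types are minimal stand-ins rather than the real DolphinScheduler entities:

```java
// Hedged sketch: permit sharing when the caller is an admin OR owns the object.
final class SharePermissionSketch {

    enum UserType { ADMIN_USER, GENERAL_USER }

    static final class User {
        final int id;
        final UserType userType;
        User(int id, UserType userType) { this.id = id; this.userType = userType; }
    }

    // Today the check is admin-only; the feature request amounts to adding the owner clause.
    static boolean canShare(User loginUser, int ownerUserId) {
        return loginUser.userType == UserType.ADMIN_USER || loginUser.id == ownerUserId;
    }

    public static void main(String[] args) {
        User ordinaryOwner = new User(7, UserType.GENERAL_USER);
        System.out.println(canShare(ordinaryOwner, 7)); // true: owners may share their own objects
        System.out.println(canShare(ordinaryOwner, 8)); // false: still no access to others' objects
    }
}
```

The design point is simply that ownership, not only admin status, would grant the right to share.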
| https://github.com/apache/dolphinscheduler/issues/6820 | https://github.com/apache/dolphinscheduler/pull/7929 | 0c353d69e29a0b562ebb6938d345fc1b4589b781 | 82d04f1924692db83461b8c7800f2ab7228825c1 | "2021-11-11T16:24:04Z" | java | "2022-01-15T01:08:03Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.DataSourceService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper;
import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils;
import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.datasource.ConnectionParam;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.commons.lang.StringUtils;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DuplicateKeyException;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
* data source service impl
*/
@Service
public class DataSourceServiceImpl extends BaseServiceImpl implements DataSourceService {
private static final Logger logger = LoggerFactory.getLogger(DataSourceServiceImpl.class);
@Autowired
private DataSourceMapper dataSourceMapper;
@Autowired
private DataSourceUserMapper datasourceUserMapper;
/**
* create data source
*
* @param loginUser login user
* @param datasourceParam datasource parameters
* @return create result code
*/
@Override
public Result<Object> createDataSource(User loginUser, BaseDataSourceParamDTO datasourceParam) {
DataSourceUtils.checkDatasourceParam(datasourceParam);
Result<Object> result = new Result<>();
// check name can use or not
if (checkName(datasourceParam.getName())) {
putMsg(result, Status.DATASOURCE_EXIST);
return result;
}
// check connect
ConnectionParam connectionParam = DataSourceUtils.buildConnectionParams(datasourceParam);
Result<Object> isConnection = checkConnection(datasourceParam.getType(), connectionParam);
if (Status.SUCCESS.getCode() != isConnection.getCode()) {
putMsg(result, Status.DATASOURCE_CONNECT_FAILED);
return result;
}
// build datasource
DataSource dataSource = new DataSource();
Date now = new Date();
dataSource.setName(datasourceParam.getName().trim());
dataSource.setNote(datasourceParam.getNote());
dataSource.setUserId(loginUser.getId());
dataSource.setUserName(loginUser.getUserName());
dataSource.setType(datasourceParam.getType());
dataSource.setConnectionParams(JSONUtils.toJsonString(connectionParam));
dataSource.setCreateTime(now);
dataSource.setUpdateTime(now);
try {
dataSourceMapper.insert(dataSource);
putMsg(result, Status.SUCCESS);
} catch (DuplicateKeyException ex) {
logger.error("Create datasource error.", ex);
putMsg(result, Status.DATASOURCE_EXIST);
}
return result;
}
/**
* update datasource
*
* @param loginUser login user
* @param id data source id
* @return update result code
*/
@Override
public Result<Object> updateDataSource(int id, User loginUser, BaseDataSourceParamDTO dataSourceParam) {
DataSourceUtils.checkDatasourceParam(dataSourceParam);
Result<Object> result = new Result<>();
// determine whether the data source exists
DataSource dataSource = dataSourceMapper.selectById(id);
if (dataSource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
if (!hasPerm(loginUser, dataSource.getUserId())) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
// check whether the new name is available
if (!dataSourceParam.getName().trim().equals(dataSource.getName()) && checkName(dataSourceParam.getName())) {
putMsg(result, Status.DATASOURCE_EXIST);
return result;
}
// check password; if the password was not updated, keep the old one
BaseConnectionParam connectionParam = (BaseConnectionParam) DataSourceUtils.buildConnectionParams(dataSourceParam);
String password = connectionParam.getPassword();
if (StringUtils.isBlank(password)) {
String oldConnectionParams = dataSource.getConnectionParams();
ObjectNode oldParams = JSONUtils.parseObject(oldConnectionParams);
connectionParam.setPassword(oldParams.path(Constants.PASSWORD).asText());
}
Result<Object> isConnection = checkConnection(dataSource.getType(), connectionParam);
if (isConnection.isFailed()) {
return isConnection;
}
Date now = new Date();
dataSource.setName(dataSourceParam.getName().trim());
dataSource.setNote(dataSourceParam.getNote());
dataSource.setUserName(loginUser.getUserName());
dataSource.setType(dataSource.getType());
dataSource.setConnectionParams(JSONUtils.toJsonString(connectionParam));
dataSource.setUpdateTime(now);
try {
dataSourceMapper.updateById(dataSource);
putMsg(result, Status.SUCCESS);
} catch (DuplicateKeyException ex) {
logger.error("Update datasource error.", ex);
putMsg(result, Status.DATASOURCE_EXIST);
}
return result;
}
private boolean checkName(String name) {
List<DataSource> queryDataSource = dataSourceMapper.queryDataSourceByName(name.trim());
return queryDataSource != null && !queryDataSource.isEmpty();
}
/**
* query datasource by id
*
* @param id datasource id
* @return data source detail
*/
@Override
public Map<String, Object> queryDataSource(int id) {
Map<String, Object> result = new HashMap<>();
DataSource dataSource = dataSourceMapper.selectById(id);
if (dataSource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
// type
BaseDataSourceParamDTO baseDataSourceParamDTO = DataSourceUtils.buildDatasourceParamDTO(
dataSource.getType(), dataSource.getConnectionParams());
baseDataSourceParamDTO.setId(dataSource.getId());
baseDataSourceParamDTO.setName(dataSource.getName());
baseDataSourceParamDTO.setNote(dataSource.getNote());
result.put(Constants.DATA_LIST, baseDataSourceParamDTO);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query datasource list by keyword
*
* @param loginUser login user
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @return data source list page
*/
@Override
public Result queryDataSourceListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Result result = new Result();
IPage<DataSource> dataSourceList;
Page<DataSource> dataSourcePage = new Page<>(pageNo, pageSize);
if (isAdmin(loginUser)) {
dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, 0, searchVal);
} else {
dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, loginUser.getId(), searchVal);
}
List<DataSource> dataSources = dataSourceList != null ? dataSourceList.getRecords() : new ArrayList<>();
handlePasswd(dataSources);
PageInfo<DataSource> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotal((int) (dataSourceList != null ? dataSourceList.getTotal() : 0L));
pageInfo.setTotalList(dataSources);
result.setData(pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* handle datasource connection password for safety
*/
private void handlePasswd(List<DataSource> dataSourceList) {
for (DataSource dataSource : dataSourceList) {
String connectionParams = dataSource.getConnectionParams();
ObjectNode object = JSONUtils.parseObject(connectionParams);
object.put(Constants.PASSWORD, getHiddenPassword());
dataSource.setConnectionParams(object.toString());
}
}
/**
* get hidden password (resolve the security hotspot)
*
* @return hidden password
*/
private String getHiddenPassword() {
return Constants.XXXXXX;
}
/**
* query data resource list
*
* @param loginUser login user
* @param type data source type
* @return data source list page
*/
@Override
public Map<String, Object> queryDataSourceList(User loginUser, Integer type) {
Map<String, Object> result = new HashMap<>();
List<DataSource> datasourceList;
if (isAdmin(loginUser)) {
datasourceList = dataSourceMapper.listAllDataSourceByType(type);
} else {
datasourceList = dataSourceMapper.queryDataSourceByType(loginUser.getId(), type);
}
result.put(Constants.DATA_LIST, datasourceList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* verify datasource exists
*
* @param name datasource name
* @return true if data datasource not exists, otherwise return false
*/
@Override
public Result<Object> verifyDataSourceName(String name) {
Result<Object> result = new Result<>();
List<DataSource> dataSourceList = dataSourceMapper.queryDataSourceByName(name);
if (dataSourceList != null && !dataSourceList.isEmpty()) {
putMsg(result, Status.DATASOURCE_EXIST);
} else {
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* check connection
*
* @param type data source type
* @param connectionParam connectionParam
* @return true if the connection succeeds, otherwise false
*/
@Override
public Result<Object> checkConnection(DbType type, ConnectionParam connectionParam) {
Result<Object> result = new Result<>();
try (Connection connection = DataSourceClientProvider.getInstance().getConnection(type, connectionParam)) {
if (connection == null) {
putMsg(result, Status.CONNECTION_TEST_FAILURE);
return result;
}
putMsg(result, Status.SUCCESS);
return result;
} catch (Exception e) {
logger.error("datasource test connection error, dbType:{}, connectionParam:{}, message:{}.", type, connectionParam, e.getMessage());
return new Result<>(Status.CONNECTION_TEST_FAILURE.getCode(), e.getMessage());
}
}
/**
* test connection
*
* @param id datasource id
* @return connect result code
*/
@Override
public Result<Object> connectionTest(int id) {
DataSource dataSource = dataSourceMapper.selectById(id);
if (dataSource == null) {
Result<Object> result = new Result<>();
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
return checkConnection(dataSource.getType(), DataSourceUtils.buildConnectionParams(dataSource.getType(), dataSource.getConnectionParams()));
}
/**
* delete datasource
*
* @param loginUser login user
* @param datasourceId data source id
* @return delete result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Result<Object> delete(User loginUser, int datasourceId) {
Result<Object> result = new Result<>();
try {
//query datasource by id
DataSource dataSource = dataSourceMapper.selectById(datasourceId);
if (dataSource == null) {
logger.error("resource id {} not exist", datasourceId);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
if (!hasPerm(loginUser, dataSource.getUserId())) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
dataSourceMapper.deleteById(datasourceId);
datasourceUserMapper.deleteByDatasourceId(datasourceId);
putMsg(result, Status.SUCCESS);
} catch (Exception e) {
logger.error("delete datasource error", e);
throw new RuntimeException("delete datasource error");
}
return result;
}
/**
* unauthorized datasource
*
* @param loginUser login user
* @param userId user id
* @return unauthed data source result code
*/
@Override
public Map<String, Object> unauthDatasource(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>();
//only admin operate
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
// query all data sources except those created by userId
List<DataSource> resultList = new ArrayList<>();
List<DataSource> datasourceList = dataSourceMapper.queryDatasourceExceptUserId(userId);
Set<DataSource> datasourceSet = null;
if (datasourceList != null && !datasourceList.isEmpty()) {
datasourceSet = new HashSet<>(datasourceList);
List<DataSource> authedDataSourceList = dataSourceMapper.queryAuthedDatasource(userId);
Set<DataSource> authedDataSourceSet = null;
if (authedDataSourceList != null && !authedDataSourceList.isEmpty()) {
authedDataSourceSet = new HashSet<>(authedDataSourceList);
datasourceSet.removeAll(authedDataSourceSet);
}
resultList = new ArrayList<>(datasourceSet);
}
result.put(Constants.DATA_LIST, resultList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* authorized datasource
*
* @param loginUser login user
* @param userId user id
* @return authorized result code
*/
@Override
public Map<String, Object> authedDatasource(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>();
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
List<DataSource> authedDatasourceList = dataSourceMapper.queryAuthedDatasource(userId);
result.put(Constants.DATA_LIST, authedDatasourceList);
putMsg(result, Status.SUCCESS);
return result;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 6,820 | [Feature][API] Ordinary users can also share resource data source projects with others | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Ordinary (non-admin) users should also be able to share resources, data sources, and projects with other users.
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/6820 | https://github.com/apache/dolphinscheduler/pull/7929 | 0c353d69e29a0b562ebb6938d345fc1b4589b781 | 82d04f1924692db83461b8c7800f2ab7228825c1 | "2021-11-11T16:24:04Z" | java | "2022-01-15T01:08:03Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.api.utils.CheckUtils.checkDesc;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils;
import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils.CodeGenerateException;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.ProjectUser;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* project service impl
**/
@Service
public class ProjectServiceImpl extends BaseServiceImpl implements ProjectService {
@Autowired
private ProjectMapper projectMapper;
@Autowired
private ProjectUserMapper projectUserMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
@Autowired
private UserMapper userMapper;
/**
* create project
*
* @param loginUser login user
* @param name project name
* @param desc description
* @return create result; an error status if a project with the same name already exists
*/
@Override
public Map<String, Object> createProject(User loginUser, String name, String desc) {
Map<String, Object> result = new HashMap<>();
Map<String, Object> descCheck = checkDesc(desc);
if (descCheck.get(Constants.STATUS) != Status.SUCCESS) {
return descCheck;
}
Project project = projectMapper.queryByName(name);
if (project != null) {
putMsg(result, Status.PROJECT_ALREADY_EXISTS, name);
return result;
}
Date now = new Date();
try {
project = Project
.newBuilder()
.name(name)
.code(CodeGenerateUtils.getInstance().genCode())
.description(desc)
.userId(loginUser.getId())
.userName(loginUser.getUserName())
.createTime(now)
.updateTime(now)
.build();
} catch (CodeGenerateException e) {
putMsg(result, Status.CREATE_PROJECT_ERROR);
return result;
}
if (projectMapper.insert(project) > 0) {
result.put(Constants.DATA_LIST, project);
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.CREATE_PROJECT_ERROR);
}
return result;
}
/**
* query project details by code
*
* @param projectCode project code
* @return project detail information
*/
@Override
public Map<String, Object> queryByCode(User loginUser, long projectCode) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByCode(projectCode);
boolean hasProjectAndPerm = hasProjectAndPerm(loginUser, project, result);
if (!hasProjectAndPerm) {
return result;
}
if (project != null) {
result.put(Constants.DATA_LIST, project);
putMsg(result, Status.SUCCESS);
}
return result;
}
@Override
public Map<String, Object> queryByName(User loginUser, String projectName) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
boolean hasProjectAndPerm = hasProjectAndPerm(loginUser, project, result);
if (!hasProjectAndPerm) {
return result;
}
if (project != null) {
result.put(Constants.DATA_LIST, project);
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* check project and authorization
*
* @param loginUser login user
* @param project project
* @param projectCode project code
* @return check result; success only if the login user has permission to see the project
*/
@Override
public Map<String, Object> checkProjectAndAuth(User loginUser, Project project, long projectCode) {
Map<String, Object> result = new HashMap<>();
if (project == null) {
putMsg(result, Status.PROJECT_NOT_EXIST);
} else if (!checkReadPermission(loginUser, project)) {
// check read permission
putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), projectCode);
} else {
putMsg(result, Status.SUCCESS);
}
return result;
}
@Override
public boolean hasProjectAndPerm(User loginUser, Project project, Map<String, Object> result) {
boolean checkResult = false;
if (project == null) {
putMsg(result, Status.PROJECT_NOT_FOUND, "");
} else if (!checkReadPermission(loginUser, project)) {
putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), project.getCode());
} else {
checkResult = true;
}
return checkResult;
}
@Override
public boolean hasProjectAndPerm(User loginUser, Project project, Result result) {
boolean checkResult = false;
if (project == null) {
putMsg(result, Status.PROJECT_NOT_FOUND, "");
} else if (!checkReadPermission(loginUser, project)) {
putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), project.getName());
} else {
checkResult = true;
}
return checkResult;
}
/**
* admin can view all projects
*
* @param loginUser login user
* @param searchVal search value
* @param pageSize page size
* @param pageNo page number
* @return project list which the login user have permission to see
*/
@Override
public Result queryProjectListPaging(User loginUser, Integer pageSize, Integer pageNo, String searchVal) {
Result result = new Result();
PageInfo<Project> pageInfo = new PageInfo<>(pageNo, pageSize);
Page<Project> page = new Page<>(pageNo, pageSize);
int userId = loginUser.getUserType() == UserType.ADMIN_USER ? 0 : loginUser.getId();
IPage<Project> projectIPage = projectMapper.queryProjectListPaging(page, userId, searchVal);
List<Project> projectList = projectIPage.getRecords();
if (userId != 0) {
for (Project project : projectList) {
project.setPerm(Constants.DEFAULT_ADMIN_PERMISSION);
}
}
pageInfo.setTotal((int) projectIPage.getTotal());
pageInfo.setTotalList(projectList);
result.setData(pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete project by code
*
* @param loginUser login user
* @param projectCode project code
* @return delete result code
*/
@Override
public Map<String, Object> deleteProject(User loginUser, Long projectCode) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByCode(projectCode);
Map<String, Object> checkResult = getCheckResult(loginUser, project);
if (checkResult != null) {
return checkResult;
}
if (!hasPerm(loginUser, project.getUserId())) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryAllDefinitionList(project.getCode());
if (!processDefinitionList.isEmpty()) {
putMsg(result, Status.DELETE_PROJECT_ERROR_DEFINES_NOT_NULL);
return result;
}
int delete = projectMapper.deleteById(project.getId());
if (delete > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.DELETE_PROJECT_ERROR);
}
return result;
}
/**
* get check result
*
* @param loginUser login user
* @param project project
* @return check result
*/
private Map<String, Object> getCheckResult(User loginUser, Project project) {
Map<String, Object> checkResult = checkProjectAndAuth(loginUser, project, project == null ? 0L : project.getCode());
Status status = (Status) checkResult.get(Constants.STATUS);
if (status != Status.SUCCESS) {
return checkResult;
}
return null;
}
/**
* update project
*
* @param loginUser login user
* @param projectCode project code
* @param projectName project name
* @param desc description
* @param userName project owner
* @return update result code
*/
@Override
public Map<String, Object> update(User loginUser, Long projectCode, String projectName, String desc, String userName) {
Map<String, Object> result = new HashMap<>();
Map<String, Object> descCheck = checkDesc(desc);
if (descCheck.get(Constants.STATUS) != Status.SUCCESS) {
return descCheck;
}
Project project = projectMapper.queryByCode(projectCode);
boolean hasProjectAndPerm = hasProjectAndPerm(loginUser, project, result);
if (!hasProjectAndPerm) {
return result;
}
Project tempProject = projectMapper.queryByName(projectName);
if (tempProject != null && tempProject.getCode() != project.getCode()) {
putMsg(result, Status.PROJECT_ALREADY_EXISTS, projectName);
return result;
}
User user = userMapper.queryByUserNameAccurately(userName);
if (user == null) {
putMsg(result, Status.USER_NOT_EXIST, userName);
return result;
}
project.setName(projectName);
project.setDescription(desc);
project.setUpdateTime(new Date());
project.setUserId(user.getId());
int update = projectMapper.updateById(project);
if (update > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.UPDATE_PROJECT_ERROR);
}
return result;
}
/**
* query unauthorized project
*
* @param loginUser login user
* @param userId user id
* @return the projects which the user has no permission to see
*/
@Override
public Map<String, Object> queryUnauthorizedProject(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>();
if (loginUser.getId() != userId && isNotAdmin(loginUser, result)) {
return result;
}
// query all project list except specified userId
List<Project> projectList = projectMapper.queryProjectExceptUserId(userId);
List<Project> resultList = new ArrayList<>();
Set<Project> projectSet;
if (projectList != null && !projectList.isEmpty()) {
projectSet = new HashSet<>(projectList);
List<Project> authedProjectList = projectMapper.queryAuthedProjectListByUserId(userId);
resultList = getUnauthorizedProjects(projectSet, authedProjectList);
}
result.put(Constants.DATA_LIST, resultList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* get unauthorized project
*
* @param projectSet project set
* @param authedProjectList authed project list
* @return project list that the user is not authorized to see
*/
private List<Project> getUnauthorizedProjects(Set<Project> projectSet, List<Project> authedProjectList) {
List<Project> resultList;
Set<Project> authedProjectSet;
if (authedProjectList != null && !authedProjectList.isEmpty()) {
authedProjectSet = new HashSet<>(authedProjectList);
projectSet.removeAll(authedProjectSet);
}
resultList = new ArrayList<>(projectSet);
return resultList;
}
/**
* query authorized project
*
* @param loginUser login user
* @param userId user id
* @return projects which the user has permission to see, except for projects created by this user
*/
@Override
public Map<String, Object> queryAuthorizedProject(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>();
if (loginUser.getId() != userId && isNotAdmin(loginUser, result)) {
return result;
}
List<Project> projects = projectMapper.queryAuthedProjectListByUserId(userId);
result.put(Constants.DATA_LIST, projects);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query authorized user
*
* @param loginUser login user
* @param projectCode project code
* @return users who have permission for the specified project
*/
@Override
public Map<String, Object> queryAuthorizedUser(User loginUser, Long projectCode) {
Map<String, Object> result = new HashMap<>();
// 1. check read permission
Project project = this.projectMapper.queryByCode(projectCode);
boolean hasProjectAndPerm = this.hasProjectAndPerm(loginUser, project, result);
if (!hasProjectAndPerm) {
return result;
}
// 2. query authorized user list
List<User> users = this.userMapper.queryAuthedUserListByProjectId(project.getId());
result.put(Constants.DATA_LIST, users);
this.putMsg(result, Status.SUCCESS);
return result;
}
/**
* query projects created by the login user
*
* @param loginUser login user
* @return projects created by the login user
*/
@Override
public Map<String, Object> queryProjectCreatedByUser(User loginUser) {
Map<String, Object> result = new HashMap<>();
List<Project> projects = projectMapper.queryProjectCreatedByUser(loginUser.getId());
result.put(Constants.DATA_LIST, projects);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query authorized and user create project list by user
*
* @param loginUser login user
* @return project list
*/
@Override
public Map<String, Object> queryProjectCreatedAndAuthorizedByUser(User loginUser) {
Map<String, Object> result = new HashMap<>();
List<Project> projects = null;
if (loginUser.getUserType() == UserType.ADMIN_USER) {
projects = projectMapper.selectList(null);
} else {
projects = projectMapper.queryProjectCreatedAndAuthorizedByUserId(loginUser.getId());
}
result.put(Constants.DATA_LIST, projects);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* check whether have read permission
*
* @param user user
* @param project project
* @return true if the user has permission to see the project, otherwise return false
*/
private boolean checkReadPermission(User user, Project project) {
int permissionId = queryPermission(user, project);
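// bitwise check: with hypothetical values, a perm of 7 (binary 111) or 1 (binary 001)
// would both grant read access if READ_PERMISSION occupies the lowest bit;
// the actual constant values live in Constants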
return (permissionId & Constants.READ_PERMISSION) != 0;
}
/**
* query permission id
*
* @param user user
* @param project project
* @return permission
*/
private int queryPermission(User user, Project project) {
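// permission resolution order (summary of the branches below, not an API contract):
// 1. admin user -> READ_PERMISSION
// 2. project owner -> ALL_PERMISSIONS
// 3. otherwise -> the perm column of the project-user relation, or 0 if no relation exists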
if (user.getUserType() == UserType.ADMIN_USER) {
return Constants.READ_PERMISSION;
}
if (project.getUserId() == user.getId()) {
return Constants.ALL_PERMISSIONS;
}
ProjectUser projectUser = projectUserMapper.queryProjectRelation(project.getId(), user.getId());
if (projectUser == null) {
return 0;
}
return projectUser.getPerm();
}
/**
* query all project list
*
* @return project list
*/
@Override
public Map<String, Object> queryAllProjectList() {
Map<String, Object> result = new HashMap<>();
List<Project> projects = projectMapper.queryAllProject();
result.put(Constants.DATA_LIST, projects);
putMsg(result, Status.SUCCESS);
return result;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 6,820 | [Feature][API] Ordinary users can also share resource data source projects with others | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Ordinary users can also share resource data source projects with others
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/6820 | https://github.com/apache/dolphinscheduler/pull/7929 | 0c353d69e29a0b562ebb6938d345fc1b4589b781 | 82d04f1924692db83461b8c7800f2ab7228825c1 | "2021-11-11T16:24:04Z" | java | "2022-01-15T01:08:03Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.common.Constants.ALIAS;
import static org.apache.dolphinscheduler.common.Constants.CONTENT;
import static org.apache.dolphinscheduler.common.Constants.JAR;
import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent;
import org.apache.dolphinscheduler.api.dto.resources.filter.ResourceFilter;
import org.apache.dolphinscheduler.api.dto.resources.visitor.ResourceTreeVisitor;
import org.apache.dolphinscheduler.api.dto.resources.visitor.Visitor;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.service.ResourcesService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.RegexUtils;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ProgramType;
import org.apache.dolphinscheduler.spi.enums.ResourceType;
import org.apache.dolphinscheduler.common.utils.FileUtils;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.ResourcesUser;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ResourceMapper;
import org.apache.dolphinscheduler.dao.mapper.ResourceUserMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.apache.dolphinscheduler.dao.utils.ResourceProcessDefinitionUtils;
import org.apache.commons.beanutils.BeanMap;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.regex.Matcher;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DuplicateKeyException;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.google.common.io.Files;
/**
* resources service impl
*/
@Service
public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesService {
private static final Logger logger = LoggerFactory.getLogger(ResourcesServiceImpl.class);
@Autowired
private ResourceMapper resourcesMapper;
@Autowired
private UdfFuncMapper udfFunctionMapper;
@Autowired
private TenantMapper tenantMapper;
@Autowired
private UserMapper userMapper;
@Autowired
private ResourceUserMapper resourceUserMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
/**
* create directory
*
* @param loginUser login user
* @param name alias
* @param description description
* @param type type
* @param pid parent id
* @param currentDir current directory
* @return create directory result
*/
@Override
@Transactional(rollbackFor = Exception.class)
public Result<Object> createDirectory(User loginUser,
String name,
String description,
ResourceType type,
int pid,
String currentDir) {
Result<Object> result = checkResourceUploadStartupState();
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
return result;
}
String fullName = currentDir.equals("/") ? String.format("%s%s",currentDir,name) : String.format("%s/%s",currentDir,name);
result = verifyResource(loginUser, type, fullName, pid);
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
return result;
}
if (checkResourceExists(fullName, type.ordinal())) {
logger.error("resource directory {} has exist, can't recreate", fullName);
putMsg(result, Status.RESOURCE_EXIST);
return result;
}
Date now = new Date();
Resource resource = new Resource(pid,name,fullName,true,description,name,loginUser.getId(),type,0,now,now);
try {
resourcesMapper.insert(resource);
putMsg(result, Status.SUCCESS);
Map<Object, Object> dataMap = new BeanMap(resource);
Map<String, Object> resultMap = new HashMap<>();
for (Map.Entry<Object, Object> entry: dataMap.entrySet()) {
if (!"class".equalsIgnoreCase(entry.getKey().toString())) {
resultMap.put(entry.getKey().toString(), entry.getValue());
}
}
result.setData(resultMap);
} catch (DuplicateKeyException e) {
logger.error("resource directory {} has exist, can't recreate", fullName);
putMsg(result, Status.RESOURCE_EXIST);
return result;
} catch (Exception e) {
logger.error("resource already exists, can't recreate ", e);
throw new ServiceException("resource already exists, can't recreate");
}
//create directory in hdfs
createDirectory(loginUser,fullName,type,result);
return result;
}
/**
* create resource
*
* @param loginUser login user
* @param name alias
* @param desc description
* @param file file
* @param type type
* @param pid parent id
* @param currentDir current directory
* @return create result code
*/
@Override
@Transactional(rollbackFor = Exception.class)
public Result<Object> createResource(User loginUser,
String name,
String desc,
ResourceType type,
MultipartFile file,
int pid,
String currentDir) {
Result<Object> result = checkResourceUploadStartupState();
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
return result;
}
result = verifyPid(loginUser, pid);
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
return result;
}
result = verifyFile(name, type, file);
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
return result;
}
// check resource name exists
String fullName = currentDir.equals("/") ? String.format("%s%s",currentDir,name) : String.format("%s/%s",currentDir,name);
if (checkResourceExists(fullName, type.ordinal())) {
logger.error("resource {} has exist, can't recreate", RegexUtils.escapeNRT(name));
putMsg(result, Status.RESOURCE_EXIST);
return result;
}
Date now = new Date();
Resource resource = new Resource(pid,name,fullName,false,desc,file.getOriginalFilename(),loginUser.getId(),type,file.getSize(),now,now);
try {
resourcesMapper.insert(resource);
putMsg(result, Status.SUCCESS);
Map<Object, Object> dataMap = new BeanMap(resource);
Map<String, Object> resultMap = new HashMap<>();
for (Map.Entry<Object, Object> entry: dataMap.entrySet()) {
if (!"class".equalsIgnoreCase(entry.getKey().toString())) {
resultMap.put(entry.getKey().toString(), entry.getValue());
}
}
result.setData(resultMap);
} catch (Exception e) {
logger.error("resource already exists, can't recreate ", e);
throw new ServiceException("resource already exists, can't recreate");
}
// fail upload
if (!upload(loginUser, fullName, file, type)) {
logger.error("upload resource: {} file: {} failed.", RegexUtils.escapeNRT(name), RegexUtils.escapeNRT(file.getOriginalFilename()));
putMsg(result, Status.HDFS_OPERATION_ERROR);
throw new ServiceException(String.format("upload resource: %s file: %s failed.", name, file.getOriginalFilename()));
}
return result;
}
/**
* check resource is exists
*
* @param fullName fullName
* @param type type
* @return true if resource exists
*/
private boolean checkResourceExists(String fullName, int type) {
Boolean existResource = resourcesMapper.existResource(fullName, type);
return existResource == Boolean.TRUE;
}
/**
* update resource
* @param loginUser login user
* @param resourceId resource id
* @param name name
* @param desc description
* @param type resource type
* @param file resource file
* @return update result code
*/
@Override
@Transactional(rollbackFor = Exception.class)
public Result<Object> updateResource(User loginUser,
int resourceId,
String name,
String desc,
ResourceType type,
MultipartFile file) {
Result<Object> result = checkResourceUploadStartupState();
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
return result;
}
Resource resource = resourcesMapper.selectById(resourceId);
if (resource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
if (!hasPerm(loginUser, resource.getUserId())) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
if (file == null && name.equals(resource.getAlias()) && desc.equals(resource.getDescription())) {
putMsg(result, Status.SUCCESS);
return result;
}
//check resource already exists
String originFullName = resource.getFullName();
String originResourceName = resource.getAlias();
String fullName = String.format("%s%s",originFullName.substring(0,originFullName.lastIndexOf("/") + 1),name);
if (!originResourceName.equals(name) && checkResourceExists(fullName, type.ordinal())) {
logger.error("resource {} already exists, can't recreate", name);
putMsg(result, Status.RESOURCE_EXIST);
return result;
}
result = verifyFile(name, type, file);
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
return result;
}
// query tenant by user id
String tenantCode = getTenantCode(resource.getUserId(),result);
if (StringUtils.isEmpty(tenantCode)) {
return result;
}
// verify whether the resource exists in storage
// get the path of origin file in storage
String originHdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(),tenantCode,originFullName);
try {
if (!HadoopUtils.getInstance().exists(originHdfsFileName)) {
logger.error("{} not exist", originHdfsFileName);
putMsg(result,Status.RESOURCE_NOT_EXIST);
return result;
}
} catch (IOException e) {
logger.error(e.getMessage(),e);
throw new ServiceException(Status.HDFS_OPERATION_ERROR);
}
if (!resource.isDirectory()) {
//get the origin file suffix
String originSuffix = Files.getFileExtension(originFullName);
String suffix = Files.getFileExtension(fullName);
boolean suffixIsChanged = false;
if (StringUtils.isBlank(suffix) && StringUtils.isNotBlank(originSuffix)) {
suffixIsChanged = true;
}
if (StringUtils.isNotBlank(suffix) && !suffix.equals(originSuffix)) {
suffixIsChanged = true;
}
//verify whether suffix is changed
if (suffixIsChanged) {
//need verify whether this resource is authorized to other users
Map<String, Object> columnMap = new HashMap<>();
columnMap.put("resources_id", resourceId);
List<ResourcesUser> resourcesUsers = resourceUserMapper.selectByMap(columnMap);
if (CollectionUtils.isNotEmpty(resourcesUsers)) {
List<Integer> userIds = resourcesUsers.stream().map(ResourcesUser::getUserId).collect(Collectors.toList());
List<User> users = userMapper.selectBatchIds(userIds);
String userNames = users.stream().map(User::getUserName).collect(Collectors.toList()).toString();
logger.error("resource is authorized to user {},suffix not allowed to be modified", userNames);
putMsg(result,Status.RESOURCE_IS_AUTHORIZED,userNames);
return result;
}
}
}
// updateResource data
Date now = new Date();
resource.setAlias(name);
resource.setFileName(name);
resource.setFullName(fullName);
resource.setDescription(desc);
resource.setUpdateTime(now);
if (file != null) {
resource.setSize(file.getSize());
}
try {
resourcesMapper.updateById(resource);
if (resource.isDirectory()) {
List<Integer> childrenResource = listAllChildren(resource,false);
if (CollectionUtils.isNotEmpty(childrenResource)) {
String matcherFullName = Matcher.quoteReplacement(fullName);
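// Matcher.quoteReplacement escapes '$' and '\' so replaceFirst below inserts the new
// name literally; note that originFullName itself is still interpreted as a regex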
List<Resource> childResourceList;
Integer[] childResIdArray = childrenResource.toArray(new Integer[childrenResource.size()]);
List<Resource> resourceList = resourcesMapper.listResourceByIds(childResIdArray);
childResourceList = resourceList.stream().map(t -> {
t.setFullName(t.getFullName().replaceFirst(originFullName, matcherFullName));
t.setUpdateTime(now);
return t;
}).collect(Collectors.toList());
resourcesMapper.batchUpdateResource(childResourceList);
if (ResourceType.UDF.equals(resource.getType())) {
List<UdfFunc> udfFuncs = udfFunctionMapper.listUdfByResourceId(childResIdArray);
if (CollectionUtils.isNotEmpty(udfFuncs)) {
udfFuncs = udfFuncs.stream().map(t -> {
t.setResourceName(t.getResourceName().replaceFirst(originFullName, matcherFullName));
t.setUpdateTime(now);
return t;
}).collect(Collectors.toList());
udfFunctionMapper.batchUpdateUdfFunc(udfFuncs);
}
}
}
} else if (ResourceType.UDF.equals(resource.getType())) {
List<UdfFunc> udfFuncs = udfFunctionMapper.listUdfByResourceId(new Integer[]{resourceId});
if (CollectionUtils.isNotEmpty(udfFuncs)) {
udfFuncs = udfFuncs.stream().map(t -> {
t.setResourceName(fullName);
t.setUpdateTime(now);
return t;
}).collect(Collectors.toList());
udfFunctionMapper.batchUpdateUdfFunc(udfFuncs);
}
}
putMsg(result, Status.SUCCESS);
Map<Object, Object> dataMap = new BeanMap(resource);
Map<String, Object> resultMap = new HashMap<>();
for (Map.Entry<Object, Object> entry: dataMap.entrySet()) {
if (!Constants.CLASS.equalsIgnoreCase(entry.getKey().toString())) {
resultMap.put(entry.getKey().toString(), entry.getValue());
}
}
result.setData(resultMap);
} catch (Exception e) {
logger.error(Status.UPDATE_RESOURCE_ERROR.getMsg(), e);
throw new ServiceException(Status.UPDATE_RESOURCE_ERROR);
}
// if name unchanged, return directly without moving on HDFS
if (originResourceName.equals(name) && file == null) {
return result;
}
if (file != null) {
// fail upload
if (!upload(loginUser, fullName, file, type)) {
logger.error("upload resource: {} file: {} failed.", name, RegexUtils.escapeNRT(file.getOriginalFilename()));
putMsg(result, Status.HDFS_OPERATION_ERROR);
throw new ServiceException(String.format("upload resource: %s file: %s failed.", name, file.getOriginalFilename()));
}
if (!fullName.equals(originFullName)) {
try {
HadoopUtils.getInstance().delete(originHdfsFileName,false);
} catch (IOException e) {
logger.error(e.getMessage(),e);
throw new ServiceException(String.format("delete resource: %s failed.", originFullName));
}
}
return result;
}
// get the path of dest file in hdfs
String destHdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(),tenantCode,fullName);
try {
logger.info("start hdfs copy {} -> {}", originHdfsFileName, destHdfsFileName);
HadoopUtils.getInstance().copy(originHdfsFileName, destHdfsFileName, true, true);
} catch (Exception e) {
logger.error(MessageFormat.format("hdfs copy {0} -> {1} fail", originHdfsFileName, destHdfsFileName), e);
putMsg(result,Status.HDFS_COPY_FAIL);
throw new ServiceException(Status.HDFS_COPY_FAIL);
}
return result;
}
private Result<Object> verifyFile(String name, ResourceType type, MultipartFile file) {
Result<Object> result = new Result<>();
putMsg(result, Status.SUCCESS);
if (file != null) {
// file is empty
if (file.isEmpty()) {
logger.error("file is empty: {}", RegexUtils.escapeNRT(file.getOriginalFilename()));
putMsg(result, Status.RESOURCE_FILE_IS_EMPTY);
return result;
}
// file suffix
String fileSuffix = Files.getFileExtension(file.getOriginalFilename());
String nameSuffix = Files.getFileExtension(name);
// determine file suffix
if (!(StringUtils.isNotEmpty(fileSuffix) && fileSuffix.equalsIgnoreCase(nameSuffix))) {
// the renamed file suffix must be consistent with the original suffix
logger.error("the renamed file suffix must be consistent with the original suffix: {}", RegexUtils.escapeNRT(file.getOriginalFilename()));
putMsg(result, Status.RESOURCE_SUFFIX_FORBID_CHANGE);
return result;
}
//If resource type is UDF, only jar packages are allowed to be uploaded, and the suffix must be .jar
if (Constants.UDF.equals(type.name()) && !JAR.equalsIgnoreCase(fileSuffix)) {
logger.error(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg());
putMsg(result, Status.UDF_RESOURCE_SUFFIX_NOT_JAR);
return result;
}
if (file.getSize() > Constants.MAX_FILE_SIZE) {
logger.error("file size is too large: {}", RegexUtils.escapeNRT(file.getOriginalFilename()));
putMsg(result, Status.RESOURCE_SIZE_EXCEED_LIMIT);
return result;
}
}
return result;
}
/**
* query resources list paging
*
* @param loginUser login user
* @param directoryId directory id
* @param type resource type
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @return resource list page
*/
@Override
public Result queryResourceListPaging(User loginUser, int directoryId, ResourceType type, String searchVal, Integer pageNo, Integer pageSize) {
Result result = new Result();
Page<Resource> page = new Page<>(pageNo, pageSize);
int userId = loginUser.getId();
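// as in the project service, a userId of 0 appears to act as an admin wildcard
// in the paging query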
if (isAdmin(loginUser)) {
userId = 0;
}
if (directoryId != -1) {
Resource directory = resourcesMapper.selectById(directoryId);
if (directory == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
}
List<Integer> resourcesIds = resourceUserMapper.queryResourcesIdListByUserIdAndPerm(userId, 0);
IPage<Resource> resourceIPage = resourcesMapper.queryResourcePaging(page, userId, directoryId, type.ordinal(), searchVal,resourcesIds);
PageInfo<Resource> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotal((int)resourceIPage.getTotal());
pageInfo.setTotalList(resourceIPage.getRecords());
result.setData(pageInfo);
putMsg(result,Status.SUCCESS);
return result;
}
/**
* create directory
* @param loginUser login user
* @param fullName full name
* @param type resource type
* @param result Result
*/
private void createDirectory(User loginUser,String fullName,ResourceType type,Result<Object> result) {
String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode();
String directoryName = HadoopUtils.getHdfsFileName(type,tenantCode,fullName);
String resourceRootPath = HadoopUtils.getHdfsDir(type,tenantCode);
try {
if (!HadoopUtils.getInstance().exists(resourceRootPath)) {
createTenantDirIfNotExists(tenantCode);
}
if (!HadoopUtils.getInstance().mkdir(directoryName)) {
logger.error("create resource directory {} of hdfs failed",directoryName);
putMsg(result,Status.HDFS_OPERATION_ERROR);
throw new ServiceException(String.format("create resource directory: %s failed.", directoryName));
}
} catch (Exception e) {
logger.error("create resource directory {} of hdfs failed",directoryName);
putMsg(result,Status.HDFS_OPERATION_ERROR);
throw new ServiceException(String.format("create resource directory: %s failed.", directoryName));
}
}
/**
* upload file to hdfs
*
* @param loginUser login user
* @param fullName full name
* @param file file
* @param type resource type
* @return true if the upload succeeds, otherwise false
*/
private boolean upload(User loginUser, String fullName, MultipartFile file, ResourceType type) {
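// upload flow (sketch): validate the suffix, stage the multipart file to a random
// local temp path, then copy it to HDFS with overwrite, deleting the local copy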
// save to local
String fileSuffix = Files.getFileExtension(file.getOriginalFilename());
String nameSuffix = Files.getFileExtension(fullName);
// determine file suffix
if (!(StringUtils.isNotEmpty(fileSuffix) && fileSuffix.equalsIgnoreCase(nameSuffix))) {
return false;
}
// query tenant
String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode();
// random file name
String localFilename = FileUtils.getUploadFilename(tenantCode, UUID.randomUUID().toString());
// save file to hdfs, and delete original file
String hdfsFilename = HadoopUtils.getHdfsFileName(type,tenantCode,fullName);
String resourcePath = HadoopUtils.getHdfsDir(type,tenantCode);
try {
// if tenant dir not exists
if (!HadoopUtils.getInstance().exists(resourcePath)) {
createTenantDirIfNotExists(tenantCode);
}
org.apache.dolphinscheduler.api.utils.FileUtils.copyInputStreamToFile(file, localFilename);
HadoopUtils.getInstance().copyLocalToHdfs(localFilename, hdfsFilename, true, true);
} catch (Exception e) {
FileUtils.deleteFile(localFilename);
logger.error(e.getMessage(), e);
return false;
}
return true;
}
/**
* query resource list
*
* @param loginUser login user
* @param type resource type
* @return resource list
*/
@Override
public Map<String, Object> queryResourceList(User loginUser, ResourceType type) {
Map<String, Object> result = new HashMap<>();
List<Resource> allResourceList = queryAuthoredResourceList(loginUser, type);
Visitor resourceTreeVisitor = new ResourceTreeVisitor(allResourceList);
result.put(Constants.DATA_LIST, resourceTreeVisitor.visit().getChildren());
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query resource list by program type
*
* @param loginUser login user
* @param type resource type
* @return resource list
*/
@Override
public Map<String, Object> queryResourceByProgramType(User loginUser, ResourceType type, ProgramType programType) {
Map<String, Object> result = new HashMap<>();
List<Resource> allResourceList = queryAuthoredResourceList(loginUser, type);
String suffix = ".jar";
if (programType != null) {
switch (programType) {
case JAVA:
case SCALA:
break;
case PYTHON:
suffix = ".py";
break;
default:
}
}
List<Resource> resources = new ResourceFilter(suffix, new ArrayList<>(allResourceList)).filter();
Visitor resourceTreeVisitor = new ResourceTreeVisitor(resources);
result.put(Constants.DATA_LIST, resourceTreeVisitor.visit().getChildren());
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete resource
*
* @param loginUser login user
* @param resourceId resource id
* @return delete result code
* @throws IOException exception
*/
@Override
@Transactional(rollbackFor = Exception.class)
public Result<Object> delete(User loginUser, int resourceId) throws IOException {
Result<Object> result = checkResourceUploadStartupState();
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
return result;
}
// get resource by id
Resource resource = resourcesMapper.selectById(resourceId);
if (resource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
if (!hasPerm(loginUser, resource.getUserId())) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
String tenantCode = getTenantCode(resource.getUserId(),result);
if (StringUtils.isEmpty(tenantCode)) {
return result;
}
// get all resource id of process definitions those is released
List<Map<String, Object>> list = processDefinitionMapper.listResources();
Map<Integer, Set<Long>> resourceProcessMap = ResourceProcessDefinitionUtils.getResourceProcessDefinitionMap(list);
Set<Integer> resourceIdSet = resourceProcessMap.keySet();
// get all children of the resource
List<Integer> allChildren = listAllChildren(resource,true);
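// allChildren contains the resource itself plus every descendant, so deleting a
// directory removes the whole subtree in one pass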
Integer[] needDeleteResourceIdArray = allChildren.toArray(new Integer[allChildren.size()]);
// if resource type is UDF, check whether it is bound by any UDF functions
if (resource.getType() == ResourceType.UDF) {
List<UdfFunc> udfFuncs = udfFunctionMapper.listUdfByResourceId(needDeleteResourceIdArray);
if (CollectionUtils.isNotEmpty(udfFuncs)) {
logger.error("can't be deleted,because it is bound by UDF functions:{}", udfFuncs);
putMsg(result,Status.UDF_RESOURCE_IS_BOUND,udfFuncs.get(0).getFuncName());
return result;
}
}
if (resourceIdSet.contains(resource.getPid())) {
logger.error("can't be deleted,because it is used of process definition");
putMsg(result, Status.RESOURCE_IS_USED);
return result;
}
resourceIdSet.retainAll(allChildren);
if (CollectionUtils.isNotEmpty(resourceIdSet)) {
logger.error("can't be deleted,because it is used of process definition");
for (Integer resId : resourceIdSet) {
logger.error("resource id:{} is used of process definition {}",resId,resourceProcessMap.get(resId));
}
putMsg(result, Status.RESOURCE_IS_USED);
return result;
}
// get hdfs file by type
String hdfsFilename = HadoopUtils.getHdfsFileName(resource.getType(), tenantCode, resource.getFullName());
//delete data in database
resourcesMapper.deleteIds(needDeleteResourceIdArray);
resourceUserMapper.deleteResourceUserArray(0, needDeleteResourceIdArray);
//delete file on hdfs
HadoopUtils.getInstance().delete(hdfsFilename, true);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* verify resource by name and type
* @param loginUser login user
* @param fullName resource full name
* @param type resource type
* @return verify result, SUCCESS if the resource name does not exist
*/
@Override
public Result<Object> verifyResourceName(String fullName, ResourceType type, User loginUser) {
Result<Object> result = new Result<>();
putMsg(result, Status.SUCCESS);
if (checkResourceExists(fullName, type.ordinal())) {
logger.error("resource type:{} name:{} has exist, can't create again.", type, RegexUtils.escapeNRT(fullName));
putMsg(result, Status.RESOURCE_EXIST);
} else {
// query tenant
Tenant tenant = tenantMapper.queryById(loginUser.getTenantId());
if (tenant != null) {
String tenantCode = tenant.getTenantCode();
try {
String hdfsFilename = HadoopUtils.getHdfsFileName(type,tenantCode,fullName);
if (HadoopUtils.getInstance().exists(hdfsFilename)) {
logger.error("resource type:{} name:{} has exist in hdfs {}, can't create again.", type, RegexUtils.escapeNRT(fullName), hdfsFilename);
putMsg(result, Status.RESOURCE_FILE_EXIST,hdfsFilename);
}
} catch (Exception e) {
logger.error(e.getMessage(),e);
putMsg(result,Status.HDFS_OPERATION_ERROR);
}
} else {
putMsg(result,Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST);
}
}
return result;
}
/**
* query resource by full name or id
* @param fullName resource full name
* @param id resource id
* @param type resource type
* @return the queried resource (by full name) or its parent (by id) if it exists, otherwise an error result
*/
@Override
public Result<Object> queryResource(String fullName, Integer id, ResourceType type) {
Result<Object> result = new Result<>();
if (StringUtils.isBlank(fullName) && id == null) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
return result;
}
if (StringUtils.isNotBlank(fullName)) {
List<Resource> resourceList = resourcesMapper.queryResource(fullName,type.ordinal());
if (CollectionUtils.isEmpty(resourceList)) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
putMsg(result, Status.SUCCESS);
result.setData(resourceList.get(0));
} else {
Resource resource = resourcesMapper.selectById(id);
if (resource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
Resource parentResource = resourcesMapper.selectById(resource.getPid());
if (parentResource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
putMsg(result, Status.SUCCESS);
result.setData(parentResource);
}
return result;
}
/**
* view resource file online
*
* @param resourceId resource id
* @param skipLineNum skip line number
* @param limit limit
* @return resource content
*/
@Override
public Result<Object> readResource(int resourceId, int skipLineNum, int limit) {
Result<Object> result = checkResourceUploadStartupState();
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
return result;
}
// get resource by id
Resource resource = resourcesMapper.selectById(resourceId);
if (resource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
//check preview or not by file suffix
String nameSuffix = Files.getFileExtension(resource.getAlias());
String resourceViewSuffixs = FileUtils.getResourceViewSuffixs();
if (StringUtils.isNotEmpty(resourceViewSuffixs)) {
List<String> strList = Arrays.asList(resourceViewSuffixs.split(","));
if (!strList.contains(nameSuffix)) {
logger.error("resource suffix {} not support view, resource id {}", nameSuffix, resourceId);
putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW);
return result;
}
}
String tenantCode = getTenantCode(resource.getUserId(),result);
if (StringUtils.isEmpty(tenantCode)) {
return result;
}
// hdfs path
String hdfsFileName = HadoopUtils.getHdfsResourceFileName(tenantCode, resource.getFullName());
logger.info("resource hdfs path is {}", hdfsFileName);
try {
if (HadoopUtils.getInstance().exists(hdfsFileName)) {
List<String> content = HadoopUtils.getInstance().catFile(hdfsFileName, skipLineNum, limit);
putMsg(result, Status.SUCCESS);
Map<String, Object> map = new HashMap<>();
map.put(ALIAS, resource.getAlias());
map.put(CONTENT, String.join("\n", content));
result.setData(map);
} else {
logger.error("read file {} not exist in hdfs", hdfsFileName);
putMsg(result, Status.RESOURCE_FILE_NOT_EXIST,hdfsFileName);
}
} catch (Exception e) {
logger.error("Resource {} read failed", hdfsFileName, e);
putMsg(result, Status.HDFS_OPERATION_ERROR);
}
return result;
}
/**
* create resource file online
*
* @param loginUser login user
* @param type resource type
* @param fileName file name
* @param fileSuffix file suffix
* @param desc description
* @param content content
* @param pid pid
* @param currentDir current directory
* @return create result code
*/
@Override
@Transactional(rollbackFor = Exception.class)
public Result<Object> onlineCreateResource(User loginUser, ResourceType type, String fileName, String fileSuffix, String desc, String content,int pid,String currentDir) {
Result<Object> result = checkResourceUploadStartupState();
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
return result;
}
//check file suffix
String nameSuffix = fileSuffix.trim();
String resourceViewSuffixs = FileUtils.getResourceViewSuffixs();
if (StringUtils.isNotEmpty(resourceViewSuffixs)) {
List<String> strList = Arrays.asList(resourceViewSuffixs.split(","));
if (!strList.contains(nameSuffix)) {
logger.error("resource suffix {} not support create", nameSuffix);
putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW);
return result;
}
}
String name = fileName.trim() + "." + nameSuffix;
String fullName = currentDir.equals("/") ? String.format("%s%s",currentDir,name) : String.format("%s/%s",currentDir,name);
result = verifyResource(loginUser, type, fullName, pid);
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
return result;
}
// save data
Date now = new Date();
Resource resource = new Resource(pid,name,fullName,false,desc,name,loginUser.getId(),type,content.getBytes().length,now,now);
resourcesMapper.insert(resource);
putMsg(result, Status.SUCCESS);
Map<Object, Object> dataMap = new BeanMap(resource);
Map<String, Object> resultMap = new HashMap<>();
for (Map.Entry<Object, Object> entry: dataMap.entrySet()) {
if (!Constants.CLASS.equalsIgnoreCase(entry.getKey().toString())) {
resultMap.put(entry.getKey().toString(), entry.getValue());
}
}
result.setData(resultMap);
String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode();
result = uploadContentToHdfs(fullName, tenantCode, content);
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
throw new ServiceException(result.getMsg());
}
return result;
}
private Result<Object> checkResourceUploadStartupState() {
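// guard used by every mutating operation: when resource upload is disabled in the
// configuration, short-circuit with HDFS_NOT_STARTUP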
Result<Object> result = new Result<>();
putMsg(result, Status.SUCCESS);
// if resource upload startup
if (!PropertyUtils.getResUploadStartupState()) {
logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
putMsg(result, Status.HDFS_NOT_STARTUP);
return result;
}
return result;
}
private Result<Object> verifyResource(User loginUser, ResourceType type, String fullName, int pid) {
Result<Object> result = verifyResourceName(fullName, type, loginUser);
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
return result;
}
return verifyPid(loginUser, pid);
}
private Result<Object> verifyPid(User loginUser, int pid) {
Result<Object> result = new Result<>();
putMsg(result, Status.SUCCESS);
if (pid != -1) {
Resource parentResource = resourcesMapper.selectById(pid);
if (parentResource == null) {
putMsg(result, Status.PARENT_RESOURCE_NOT_EXIST);
return result;
}
if (!hasPerm(loginUser, parentResource.getUserId())) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
}
return result;
}
/**
* update resource content
*
* @param resourceId resource id
* @param content content
* @return update result code
*/
@Override
@Transactional(rollbackFor = Exception.class)
public Result<Object> updateResourceContent(int resourceId, String content) {
Result<Object> result = checkResourceUploadStartupState();
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
return result;
}
Resource resource = resourcesMapper.selectById(resourceId);
if (resource == null) {
logger.error("read file not exist, resource id {}", resourceId);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
//check can edit by file suffix
String nameSuffix = Files.getFileExtension(resource.getAlias());
String resourceViewSuffixs = FileUtils.getResourceViewSuffixs();
if (StringUtils.isNotEmpty(resourceViewSuffixs)) {
List<String> strList = Arrays.asList(resourceViewSuffixs.split(","));
if (!strList.contains(nameSuffix)) {
logger.error("resource suffix {} not support updateProcessInstance, resource id {}", nameSuffix, resourceId);
putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW);
return result;
}
}
String tenantCode = getTenantCode(resource.getUserId(),result);
if (StringUtils.isEmpty(tenantCode)) {
return result;
}
resource.setSize(content.getBytes().length);
resource.setUpdateTime(new Date());
resourcesMapper.updateById(resource);
result = uploadContentToHdfs(resource.getFullName(), tenantCode, content);
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
throw new ServiceException(result.getMsg());
}
return result;
}
/**
* @param resourceName resource name
* @param tenantCode tenant code
* @param content content
* @return result
*/
private Result<Object> uploadContentToHdfs(String resourceName, String tenantCode, String content) {
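// sketch of the flow: write content to a random local temp file, delete any existing
// HDFS file with the same name, then copy the staged file up (removing the local copy)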
Result<Object> result = new Result<>();
String localFilename = "";
String hdfsFileName = "";
try {
localFilename = FileUtils.getUploadFilename(tenantCode, UUID.randomUUID().toString());
if (!FileUtils.writeContent2File(content, localFilename)) {
// write file fail
logger.error("file {} fail, content is {}", localFilename, RegexUtils.escapeNRT(content));
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
// get resource file hdfs path
hdfsFileName = HadoopUtils.getHdfsResourceFileName(tenantCode, resourceName);
String resourcePath = HadoopUtils.getHdfsResDir(tenantCode);
logger.info("resource hdfs path is {}, resource dir is {}", hdfsFileName, resourcePath);
HadoopUtils hadoopUtils = HadoopUtils.getInstance();
if (!hadoopUtils.exists(resourcePath)) {
// create if tenant dir not exists
createTenantDirIfNotExists(tenantCode);
}
if (hadoopUtils.exists(hdfsFileName)) {
hadoopUtils.delete(hdfsFileName, false);
}
hadoopUtils.copyLocalToHdfs(localFilename, hdfsFileName, true, true);
} catch (Exception e) {
logger.error(e.getMessage(), e);
result.setCode(Status.HDFS_OPERATION_ERROR.getCode());
result.setMsg(String.format("copy %s to hdfs %s fail", localFilename, hdfsFileName));
return result;
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* download file
*
* @param resourceId resource id
* @return resource content
* @throws IOException exception
*/
@Override
public org.springframework.core.io.Resource downloadResource(int resourceId) throws IOException {
// if resource upload startup
if (!PropertyUtils.getResUploadStartupState()) {
logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
throw new ServiceException("hdfs not startup");
}
Resource resource = resourcesMapper.selectById(resourceId);
if (resource == null) {
logger.error("download file not exist, resource id {}", resourceId);
return null;
}
if (resource.isDirectory()) {
logger.error("resource id {} is directory,can't download it", resourceId);
throw new ServiceException("can't download directory");
}
int userId = resource.getUserId();
User user = userMapper.selectById(userId);
if (user == null) {
logger.error("user id {} not exists", userId);
throw new ServiceException(String.format("resource owner id %d not exist",userId));
}
Tenant tenant = tenantMapper.queryById(user.getTenantId());
if (tenant == null) {
logger.error("tenant id {} not exists", user.getTenantId());
throw new ServiceException(String.format("The tenant id %d of resource owner not exist",user.getTenantId()));
}
String tenantCode = tenant.getTenantCode();
String hdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(), tenantCode, resource.getFullName());
String localFileName = FileUtils.getDownloadFilename(resource.getAlias());
logger.info("resource hdfs path is {}, download local filename is {}", hdfsFileName, localFileName);
HadoopUtils.getInstance().copyHdfsToLocal(hdfsFileName, localFileName, false, true);
return org.apache.dolphinscheduler.api.utils.FileUtils.file2Resource(localFileName);
}
/**
* list all files available for authorization
*
* @param loginUser login user
* @param userId user id
* @return unauthorized result code
*/
@Override
public Map<String, Object> authorizeResourceTree(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
List<Resource> resourceList = resourcesMapper.queryResourceExceptUserId(userId);
List<ResourceComponent> list;
if (CollectionUtils.isNotEmpty(resourceList)) {
Visitor visitor = new ResourceTreeVisitor(resourceList);
list = visitor.visit().getChildren();
} else {
list = new ArrayList<>(0);
}
result.put(Constants.DATA_LIST, list);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* unauthorized file
*
* @param loginUser login user
* @param userId user id
* @return unauthorized result code
*/
@Override
public Map<String, Object> unauthorizedFile(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
List<Resource> resourceList = resourcesMapper.queryResourceExceptUserId(userId);
List<Resource> list;
if (resourceList != null && !resourceList.isEmpty()) {
Set<Resource> resourceSet = new HashSet<>(resourceList);
List<Resource> authedResourceList = queryResourceList(userId, Constants.AUTHORIZE_WRITABLE_PERM);
getAuthorizedResourceList(resourceSet, authedResourceList);
list = new ArrayList<>(resourceSet);
} else {
list = new ArrayList<>(0);
}
Visitor visitor = new ResourceTreeVisitor(list);
result.put(Constants.DATA_LIST, visitor.visit().getChildren());
putMsg(result, Status.SUCCESS);
return result;
}
/**
* unauthorized udf function
*
* @param loginUser login user
* @param userId user id
* @return unauthorized result code
*/
@Override
public Map<String, Object> unauthorizedUDFFunction(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (isNotAdmin(loginUser, result)) {
return result;
}
List<UdfFunc> udfFuncList = udfFunctionMapper.queryUdfFuncExceptUserId(userId);
List<UdfFunc> resultList = new ArrayList<>();
Set<UdfFunc> udfFuncSet;
if (CollectionUtils.isNotEmpty(udfFuncList)) {
udfFuncSet = new HashSet<>(udfFuncList);
List<UdfFunc> authedUDFFuncList = udfFunctionMapper.queryAuthedUdfFunc(userId);
getAuthorizedResourceList(udfFuncSet, authedUDFFuncList);
resultList = new ArrayList<>(udfFuncSet);
}
result.put(Constants.DATA_LIST, resultList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* authorized udf function
*
* @param loginUser login user
* @param userId user id
* @return authorized result code
*/
@Override
public Map<String, Object> authorizedUDFFunction(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
List<UdfFunc> udfFuncs = udfFunctionMapper.queryAuthedUdfFunc(userId);
result.put(Constants.DATA_LIST, udfFuncs);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* authorized file
*
* @param loginUser login user
* @param userId user id
* @return authorized result
*/
@Override
public Map<String, Object> authorizedFile(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
List<Resource> authedResources = queryResourceList(userId, Constants.AUTHORIZE_WRITABLE_PERM);
Visitor visitor = new ResourceTreeVisitor(authedResources);
String visit = JSONUtils.toJsonString(visitor.visit(), SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS);
logger.info(visit);
String jsonTreeStr = JSONUtils.toJsonString(visitor.visit().getChildren(), SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS);
logger.info(jsonTreeStr);
result.put(Constants.DATA_LIST, visitor.visit().getChildren());
putMsg(result,Status.SUCCESS);
return result;
}
/**
* remove the already-authorized resources from the candidate set
*
* @param resourceSet resource set (mutated in place)
* @param authedResourceList authorized resource list
*/
private void getAuthorizedResourceList(Set<?> resourceSet, List<?> authedResourceList) {
Set<?> authedResourceSet;
if (CollectionUtils.isNotEmpty(authedResourceList)) {
authedResourceSet = new HashSet<>(authedResourceList);
resourceSet.removeAll(authedResourceSet);
}
}
/**
* get tenantCode by UserId
*
* @param userId user id
* @param result return result
* @return tenant code
*/
private String getTenantCode(int userId,Result<Object> result) {
User user = userMapper.selectById(userId);
if (user == null) {
logger.error("user {} not exists", userId);
putMsg(result, Status.USER_NOT_EXIST,userId);
return null;
}
Tenant tenant = tenantMapper.queryById(user.getTenantId());
if (tenant == null) {
logger.error("tenant not exists");
putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST);
return null;
}
return tenant.getTenantCode();
}
/**
* list all children id
* @param resource resource
* @param containSelf whether add self to children list
* @return all children id
*/
List<Integer> listAllChildren(Resource resource,boolean containSelf) {
List<Integer> childList = new ArrayList<>();
if (resource.getId() != -1 && containSelf) {
childList.add(resource.getId());
}
if (resource.isDirectory()) {
listAllChildren(resource.getId(),childList);
}
return childList;
}
/**
* list all children id
* @param resourceId resource id
* @param childList child list
*/
void listAllChildren(int resourceId,List<Integer> childList) {
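// plain depth-first traversal: each child id is appended before recursing into it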
List<Integer> children = resourcesMapper.listChildren(resourceId);
for (int childId : children) {
childList.add(childId);
listAllChildren(childId, childList);
}
}
/**
* query authored resource list (own and authorized)
* @param loginUser login user
* @param type ResourceType
* @return all authored resource list
*/
private List<Resource> queryAuthoredResourceList(User loginUser, ResourceType type) {
List<Resource> relationResources;
int userId = loginUser.getId();
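// a perm of 0 in the relation query below appears to mean "any permission";
// admins skip the relation lookup and query with the 0 wildcard user id instead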
if (isAdmin(loginUser)) {
userId = 0;
relationResources = new ArrayList<>();
} else {
// query resource relation
relationResources = queryResourceList(userId, 0);
}
List<Resource> ownResourceList = resourcesMapper.queryResourceListAuthored(userId, type.ordinal());
ownResourceList.addAll(relationResources);
return ownResourceList;
}
/**
* query resource list by userId and perm
* @param userId userId
* @param perm perm
* @return resource list
*/
private List<Resource> queryResourceList(Integer userId, int perm) {
List<Integer> resIds = resourceUserMapper.queryResourcesIdListByUserIdAndPerm(userId, perm);
return CollectionUtils.isEmpty(resIds) ? new ArrayList<>() : resourcesMapper.queryResourceListById(resIds);
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 6,820 | [Feature][API] Ordinary users can also share resource data source projects with others | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Ordinary users can also share resource data source projects with others
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/6820 | https://github.com/apache/dolphinscheduler/pull/7929 | 0c353d69e29a0b562ebb6938d345fc1b4589b781 | 82d04f1924692db83461b8c7800f2ab7228825c1 | "2021-11-11T16:24:04Z" | java | "2022-01-15T01:08:03Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent;
import org.apache.dolphinscheduler.api.dto.resources.visitor.ResourceTreeVisitor;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.service.UsersService;
import org.apache.dolphinscheduler.api.utils.CheckUtils;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.EncryptionUtils;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.dao.entity.AlertGroup;
import org.apache.dolphinscheduler.dao.entity.DatasourceUser;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.ProjectUser;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.ResourcesUser;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.UDFUser;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.AccessTokenMapper;
import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper;
import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper;
import org.apache.dolphinscheduler.dao.mapper.ResourceMapper;
import org.apache.dolphinscheduler.dao.mapper.ResourceUserMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.apache.dolphinscheduler.dao.utils.ResourceProcessDefinitionUtils;
import org.apache.dolphinscheduler.spi.enums.ResourceType;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* users service impl
*/
@Service
public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
private static final Logger logger = LoggerFactory.getLogger(UsersServiceImpl.class);
@Autowired
private AccessTokenMapper accessTokenMapper;
@Autowired
private UserMapper userMapper;
@Autowired
private TenantMapper tenantMapper;
@Autowired
private ProjectUserMapper projectUserMapper;
@Autowired
private ResourceUserMapper resourceUserMapper;
@Autowired
private ResourceMapper resourceMapper;
@Autowired
private DataSourceUserMapper datasourceUserMapper;
@Autowired
private UDFUserMapper udfUserMapper;
@Autowired
private AlertGroupMapper alertGroupMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
@Autowired
private ProjectMapper projectMapper;
/**
* create user; only the system admin has permission
*
* @param loginUser login user
* @param userName user name
* @param userPassword user password
* @param email email
* @param tenantId tenant id
* @param phone phone
* @param queue queue
* @return create result code
* @throws Exception exception
*/
@Override
@Transactional(rollbackFor = Exception.class)
public Map<String, Object> createUser(User loginUser,
String userName,
String userPassword,
String email,
int tenantId,
String phone,
String queue,
int state) throws IOException {
Map<String, Object> result = new HashMap<>();
//check all user params
String msg = this.checkUserParams(userName, userPassword, email, phone);
if (!StringUtils.isEmpty(msg)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, msg);
return result;
}
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
if (!checkTenantExists(tenantId)) {
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
User user = createUser(userName, userPassword, email, tenantId, phone, queue, state);
Tenant tenant = tenantMapper.queryById(tenantId);
// resource upload startup
if (PropertyUtils.getResUploadStartupState()) {
// if the tenant directory does not exist, create it
if (!HadoopUtils.getInstance().exists(HadoopUtils.getHdfsTenantDir(tenant.getTenantCode()))) {
createTenantDirIfNotExists(tenant.getTenantCode());
}
String userPath = HadoopUtils.getHdfsUserDir(tenant.getTenantCode(), user.getId());
HadoopUtils.getInstance().mkdir(userPath);
}
result.put(Constants.DATA_LIST, user);
putMsg(result, Status.SUCCESS);
return result;
}
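/*
* Usage sketch (hypothetical caller; the names below are illustrative, not part of this class):
*
*   Map<String, Object> res = usersService.createUser(adminUser, "alice", "Alice#2022",
*           "alice@example.com", 1, "13800000000", "default", Flag.YES.ordinal());
*   if (res.get(Constants.STATUS) == Status.SUCCESS) {
*       User created = (User) res.get(Constants.DATA_LIST);
*   }
*/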
@Override
@Transactional(rollbackFor = RuntimeException.class)
public User createUser(String userName,
String userPassword,
String email,
int tenantId,
String phone,
String queue,
int state) {
User user = new User();
Date now = new Date();
user.setUserName(userName);
user.setUserPassword(EncryptionUtils.getMd5(userPassword));
user.setEmail(email);
user.setTenantId(tenantId);
user.setPhone(phone);
user.setState(state);
// create a general user; administrator accounts are built-in
user.setUserType(UserType.GENERAL_USER);
user.setCreateTime(now);
user.setUpdateTime(now);
if (StringUtils.isEmpty(queue)) {
queue = "";
}
user.setQueue(queue);
// save user
userMapper.insert(user);
return user;
}
/**
* create user for LDAP login
*/
@Override
@Transactional(rollbackFor = Exception.class)
public User createUser(UserType userType, String userId, String email) {
User user = new User();
Date now = new Date();
user.setUserName(userId);
user.setEmail(email);
// create the user with the supplied type; administrator accounts are built-in
user.setUserType(userType);
user.setCreateTime(now);
user.setUpdateTime(now);
user.setQueue("");
// save user
userMapper.insert(user);
return user;
}
/**
* get user by user name
*
* @param userName user name
* @return exist user or null
*/
@Override
public User getUserByUserName(String userName) {
return userMapper.queryByUserNameAccurately(userName);
}
/**
* query user by id
*
* @param id id
* @return user info
*/
@Override
public User queryUser(int id) {
return userMapper.selectById(id);
}
@Override
public List<User> queryUser(List<Integer> ids) {
if (CollectionUtils.isEmpty(ids)) {
return new ArrayList<>();
}
return userMapper.selectByIds(ids);
}
/**
* query user
*
* @param name name
* @return user info
*/
@Override
public User queryUser(String name) {
return userMapper.queryByUserNameAccurately(name);
}
/**
* query user
*
* @param name name
* @param password password
* @return user info
*/
@Override
public User queryUser(String name, String password) {
String md5 = EncryptionUtils.getMd5(password);
return userMapper.queryUserByNamePassword(name, md5);
}
/**
* get user id by user name
*
* @param name user name
* @return 0 if the name is empty, -1 if the user does not exist, otherwise the user id
*/
@Override
public int getUserIdByName(String name) {
//executor name query
int executorId = 0;
if (StringUtils.isNotEmpty(name)) {
User executor = queryUser(name);
if (null != executor) {
executorId = executor.getId();
} else {
executorId = -1;
}
}
return executorId;
}
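/*
* Return-convention sketch (values illustrative):
*   getUserIdByName("")      -> 0   name is empty
*   getUserIdByName("ghost") -> -1  user does not exist
*   getUserIdByName("alice") -> 7   assuming user "alice" exists with id 7
*/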
/**
* query user list
*
* @param loginUser login user
* @param pageNo page number
* @param searchVal search value
* @param pageSize page size
* @return user list page
*/
@Override
public Result queryUserList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Result result = new Result();
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
Page<User> page = new Page<>(pageNo, pageSize);
IPage<User> scheduleList = userMapper.queryUserPaging(page, searchVal);
PageInfo<User> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotal((int) scheduleList.getTotal());
pageInfo.setTotalList(scheduleList.getRecords());
result.setData(pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* update user
*
* @param userId user id
* @param userName user name
* @param userPassword user password
* @param email email
* @param tenantId tenant id
* @param phone phone
* @param queue queue
* @return update result code
* @throws Exception exception
*/
@Override
public Map<String, Object> updateUser(User loginUser, int userId,
String userName,
String userPassword,
String email,
int tenantId,
String phone,
String queue,
int state) throws IOException {
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
if (check(result, !hasPerm(loginUser, userId), Status.USER_NO_OPERATION_PERM)) {
return result;
}
User user = userMapper.selectById(userId);
if (user == null) {
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
if (StringUtils.isNotEmpty(userName)) {
if (!CheckUtils.checkUserName(userName)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName);
return result;
}
User tempUser = userMapper.queryByUserNameAccurately(userName);
if (tempUser != null && tempUser.getId() != userId) {
putMsg(result, Status.USER_NAME_EXIST);
return result;
}
user.setUserName(userName);
}
if (StringUtils.isNotEmpty(userPassword)) {
if (!CheckUtils.checkPassword(userPassword)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userPassword);
return result;
}
user.setUserPassword(EncryptionUtils.getMd5(userPassword));
}
if (StringUtils.isNotEmpty(email)) {
if (!CheckUtils.checkEmail(email)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, email);
return result;
}
user.setEmail(email);
}
if (StringUtils.isNotEmpty(phone) && !CheckUtils.checkPhone(phone)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, phone);
return result;
}
if (state == 0 && user.getState() != state && loginUser.getId() == user.getId()) {
putMsg(result, Status.NOT_ALLOW_TO_DISABLE_OWN_ACCOUNT);
return result;
}
user.setPhone(phone);
user.setQueue(queue);
user.setState(state);
Date now = new Date();
user.setUpdateTime(now);
//if user switches the tenant, the user's resources need to be copied to the new tenant
if (user.getTenantId() != tenantId) {
Tenant oldTenant = tenantMapper.queryById(user.getTenantId());
//query tenant
Tenant newTenant = tenantMapper.queryById(tenantId);
if (newTenant != null) {
// if hdfs startup
if (PropertyUtils.getResUploadStartupState() && oldTenant != null) {
String newTenantCode = newTenant.getTenantCode();
String oldResourcePath = HadoopUtils.getHdfsResDir(oldTenant.getTenantCode());
String oldUdfsPath = HadoopUtils.getHdfsUdfDir(oldTenant.getTenantCode());
// if old tenant dir exists
if (HadoopUtils.getInstance().exists(oldResourcePath)) {
String newResourcePath = HadoopUtils.getHdfsResDir(newTenantCode);
String newUdfsPath = HadoopUtils.getHdfsUdfDir(newTenantCode);
//file resources list
List<Resource> fileResourcesList = resourceMapper.queryResourceList(
null, userId, ResourceType.FILE.ordinal());
if (CollectionUtils.isNotEmpty(fileResourcesList)) {
ResourceTreeVisitor resourceTreeVisitor = new ResourceTreeVisitor(fileResourcesList);
ResourceComponent resourceComponent = resourceTreeVisitor.visit();
copyResourceFiles(resourceComponent, oldResourcePath, newResourcePath);
}
//udf resources
List<Resource> udfResourceList = resourceMapper.queryResourceList(
null, userId, ResourceType.UDF.ordinal());
if (CollectionUtils.isNotEmpty(udfResourceList)) {
ResourceTreeVisitor resourceTreeVisitor = new ResourceTreeVisitor(udfResourceList);
ResourceComponent resourceComponent = resourceTreeVisitor.visit();
copyResourceFiles(resourceComponent, oldUdfsPath, newUdfsPath);
}
//Delete the user from the old tenant directory
String oldUserPath = HadoopUtils.getHdfsUserDir(oldTenant.getTenantCode(), userId);
HadoopUtils.getInstance().delete(oldUserPath, true);
} else {
// if the old tenant dir does not exist, create it
createTenantDirIfNotExists(oldTenant.getTenantCode());
}
if (HadoopUtils.getInstance().exists(HadoopUtils.getHdfsTenantDir(newTenant.getTenantCode()))) {
//create user in the new tenant directory
String newUserPath = HadoopUtils.getHdfsUserDir(newTenant.getTenantCode(), user.getId());
HadoopUtils.getInstance().mkdir(newUserPath);
} else {
// if the new tenant dir does not exist, create it
createTenantDirIfNotExists(newTenant.getTenantCode());
}
}
}
user.setTenantId(tenantId);
}
// update user
userMapper.updateById(user);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete user
*
* @param loginUser login user
* @param id user id
* @return delete result code
* @throws Exception exception when operating on HDFS
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> deleteUserById(User loginUser, int id) throws IOException {
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM, id);
return result;
}
// check the user exists
User tempUser = userMapper.selectById(id);
if (tempUser == null) {
putMsg(result, Status.USER_NOT_EXIST, id);
return result;
}
// check whether the user still owns any projects
List<Project> projects = projectMapper.queryProjectCreatedByUser(id);
if (CollectionUtils.isNotEmpty(projects)) {
String projectNames = projects.stream().map(Project::getName).collect(Collectors.joining(","));
putMsg(result, Status.TRANSFORM_PROJECT_OWNERSHIP, projectNames);
return result;
}
// delete user
User user = userMapper.queryTenantCodeByUserId(id);
if (user != null) {
if (PropertyUtils.getResUploadStartupState()) {
String userPath = HadoopUtils.getHdfsUserDir(user.getTenantCode(), id);
if (HadoopUtils.getInstance().exists(userPath)) {
HadoopUtils.getInstance().delete(userPath, true);
}
}
}
accessTokenMapper.deleteAccessTokenByUserId(id);
userMapper.deleteById(id);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* grant project
*
* @param loginUser login user
* @param userId user id
* @param projectIds project id array
* @return grant result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> grantProject(User loginUser, int userId, String projectIds) {
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
// check the user exists
User tempUser = userMapper.selectById(userId);
if (tempUser == null) {
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
// if the selected projectIds are empty, delete all project relations of the user
if (check(result, StringUtils.isEmpty(projectIds), Status.SUCCESS)) {
projectUserMapper.deleteProjectRelation(0, userId);
return result;
}
String[] projectIdArr = projectIds.split(",");
for (String projectId : projectIdArr) {
Date now = new Date();
ProjectUser projectUser = new ProjectUser();
projectUser.setUserId(userId);
projectUser.setProjectId(Integer.parseInt(projectId));
projectUser.setPerm(7);
projectUser.setCreateTime(now);
projectUser.setUpdateTime(now);
projectUserMapper.insert(projectUser);
}
putMsg(result, Status.SUCCESS);
return result;
}
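/*
* Usage sketch, assuming projects with ids 1 and 2 exist (ids illustrative):
*
*   usersService.grantProject(adminUser, 42, "1,2");
*   // each listed project gets a ProjectUser relation with full permission (perm = 7)
*/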
/**
* grant project by code
*
* @param loginUser login user
* @param userId user id
* @param projectCode project code
* @return grant result code
*/
@Override
public Map<String, Object> grantProjectByCode(final User loginUser, final int userId, final long projectCode) {
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
// 1. check whether the user exists
User tempUser = this.userMapper.selectById(userId);
if (tempUser == null) {
this.putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
// 2. check whether the project exists
Project project = this.projectMapper.queryByCode(projectCode);
if (project == null) {
this.putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
return result;
}
// 3. only project owner can operate
if (!this.hasPerm(loginUser, project.getUserId())) {
this.putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
// 4. maintain the relationship between project and user
final Date today = new Date();
ProjectUser projectUser = new ProjectUser();
projectUser.setUserId(userId);
projectUser.setProjectId(project.getId());
projectUser.setPerm(7);
projectUser.setCreateTime(today);
projectUser.setUpdateTime(today);
this.projectUserMapper.insert(projectUser);
this.putMsg(result, Status.SUCCESS);
return result;
}
/**
* revoke the project permission for the specified user.
* @param loginUser login user
* @param userId user id
* @param projectCode project code
* @return operation result
*/
@Override
public Map<String, Object> revokeProject(User loginUser, int userId, long projectCode) {
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
// 1. only admin can operate
if (this.check(result, !this.isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
// 2. check whether the user exists
User user = this.userMapper.selectById(userId);
if (user == null) {
this.putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
// 3. check whether the project exists
Project project = this.projectMapper.queryByCode(projectCode);
if (project == null) {
this.putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
return result;
}
// 4. delete the relationship between project and user
this.projectUserMapper.deleteProjectRelation(project.getId(), user.getId());
this.putMsg(result, Status.SUCCESS);
return result;
}
/**
* grant resource
*
* @param loginUser login user
* @param userId user id
* @param resourceIds resource id array
* @return grant result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> grantResources(User loginUser, int userId, String resourceIds) {
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
User user = userMapper.selectById(userId);
if (user == null) {
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
Set<Integer> needAuthorizeResIds = new HashSet<>();
if (StringUtils.isNotBlank(resourceIds)) {
String[] resourceFullIdArr = resourceIds.split(",");
// build the set of resource ids that need authorization
for (String resourceFullId : resourceFullIdArr) {
String[] resourceIdArr = resourceFullId.split("-");
for (int i = 0; i < resourceIdArr.length; i++) {
int resourceIdValue = Integer.parseInt(resourceIdArr[i]);
needAuthorizeResIds.add(resourceIdValue);
}
}
}
//get the authorized resource id list by user id
List<Integer> resIds = resourceUserMapper.queryResourcesIdListByUserIdAndPerm(userId, Constants.AUTHORIZE_WRITABLE_PERM);
List<Resource> oldAuthorizedRes = CollectionUtils.isEmpty(resIds) ? new ArrayList<>() : resourceMapper.queryResourceListById(resIds);
// collect the ids of the previously authorized resources
Set<Integer> oldAuthorizedResIds = oldAuthorizedRes.stream().map(Resource::getId).collect(Collectors.toSet());
//get the unauthorized resource id list
oldAuthorizedResIds.removeAll(needAuthorizeResIds);
if (CollectionUtils.isNotEmpty(oldAuthorizedResIds)) {
// get all resource ids referenced by the user's released process definitions
List<Map<String, Object>> list = processDefinitionMapper.listResourcesByUser(userId);
Map<Integer, Set<Long>> resourceProcessMap = ResourceProcessDefinitionUtils.getResourceProcessDefinitionMap(list);
Set<Integer> resourceIdSet = resourceProcessMap.keySet();
resourceIdSet.retainAll(oldAuthorizedResIds);
if (CollectionUtils.isNotEmpty(resourceIdSet)) {
logger.error("can't be deleted,because it is used of process definition");
for (Integer resId : resourceIdSet) {
logger.error("resource id:{} is used of process definition {}", resId, resourceProcessMap.get(resId));
}
putMsg(result, Status.RESOURCE_IS_USED);
return result;
}
}
resourceUserMapper.deleteResourceUser(userId, 0);
if (check(result, StringUtils.isEmpty(resourceIds), Status.SUCCESS)) {
return result;
}
for (int resourceIdValue : needAuthorizeResIds) {
Resource resource = resourceMapper.selectById(resourceIdValue);
if (resource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
Date now = new Date();
ResourcesUser resourcesUser = new ResourcesUser();
resourcesUser.setUserId(userId);
resourcesUser.setResourcesId(resourceIdValue);
if (resource.isDirectory()) {
resourcesUser.setPerm(Constants.AUTHORIZE_READABLE_PERM);
} else {
resourcesUser.setPerm(Constants.AUTHORIZE_WRITABLE_PERM);
}
resourcesUser.setCreateTime(now);
resourcesUser.setUpdateTime(now);
resourceUserMapper.insert(resourcesUser);
}
putMsg(result, Status.SUCCESS);
return result;
}
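/*
* Input-format sketch for resourceIds: each comma-separated entry is a "full id"
* path from the root directory down to the resource, joined by '-' (an assumption
* based on the parsing above). For example "1-3-7,1-4" authorizes resources
* 1, 3, 7 and 4, so authorizing a file also grants read permission on its
* ancestor directories.
*/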
/**
* grant udf function
*
* @param loginUser login user
* @param userId user id
* @param udfIds udf id array
* @return grant result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> grantUDFFunction(User loginUser, int userId, String udfIds) {
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
User user = userMapper.selectById(userId);
if (user == null) {
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
udfUserMapper.deleteByUserId(userId);
if (check(result, StringUtils.isEmpty(udfIds), Status.SUCCESS)) {
return result;
}
String[] resourcesIdArr = udfIds.split(",");
for (String udfId : resourcesIdArr) {
Date now = new Date();
UDFUser udfUser = new UDFUser();
udfUser.setUserId(userId);
udfUser.setUdfId(Integer.parseInt(udfId));
udfUser.setPerm(7);
udfUser.setCreateTime(now);
udfUser.setUpdateTime(now);
udfUserMapper.insert(udfUser);
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* grant datasource
*
* @param loginUser login user
* @param userId user id
* @param datasourceIds data source id array
* @return grant result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> grantDataSource(User loginUser, int userId, String datasourceIds) {
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
User user = userMapper.selectById(userId);
if (user == null) {
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
datasourceUserMapper.deleteByUserId(userId);
if (check(result, StringUtils.isEmpty(datasourceIds), Status.SUCCESS)) {
return result;
}
String[] datasourceIdArr = datasourceIds.split(",");
for (String datasourceId : datasourceIdArr) {
Date now = new Date();
DatasourceUser datasourceUser = new DatasourceUser();
datasourceUser.setUserId(userId);
datasourceUser.setDatasourceId(Integer.parseInt(datasourceId));
datasourceUser.setPerm(7);
datasourceUser.setCreateTime(now);
datasourceUser.setUpdateTime(now);
datasourceUserMapper.insert(datasourceUser);
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query user info
*
* @param loginUser login user
* @return user info
*/
@Override
public Map<String, Object> getUserInfo(User loginUser) {
Map<String, Object> result = new HashMap<>();
User user = null;
if (loginUser.getUserType() == UserType.ADMIN_USER) {
user = loginUser;
} else {
user = userMapper.queryDetailsById(loginUser.getId());
List<AlertGroup> alertGroups = alertGroupMapper.queryByUserId(loginUser.getId());
StringBuilder sb = new StringBuilder();
if (alertGroups != null && !alertGroups.isEmpty()) {
for (int i = 0; i < alertGroups.size() - 1; i++) {
sb.append(alertGroups.get(i).getGroupName()).append(",");
}
sb.append(alertGroups.get(alertGroups.size() - 1).getGroupName());
user.setAlertGroup(sb.toString());
}
}
result.put(Constants.DATA_LIST, user);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query user list
*
* @param loginUser login user
* @return user list
*/
@Override
public Map<String, Object> queryAllGeneralUsers(User loginUser) {
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
List<User> userList = userMapper.queryAllGeneralUser();
result.put(Constants.DATA_LIST, userList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query user list
*
* @param loginUser login user
* @return user list
*/
@Override
public Map<String, Object> queryUserList(User loginUser) {
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
List<User> userList = userMapper.selectList(null);
result.put(Constants.DATA_LIST, userList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* verify user name exists
*
* @param userName user name
* @return true if user name not exists, otherwise return false
*/
@Override
public Result<Object> verifyUserName(String userName) {
Result<Object> result = new Result<>();
User user = userMapper.queryByUserNameAccurately(userName);
if (user != null) {
putMsg(result, Status.USER_NAME_EXIST);
} else {
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* query unauthorized users, i.e. users not yet bound to the alert group
*
* @param loginUser login user
* @param alertgroupId alert group id
* @return unauthorized users result
*/
@Override
public Map<String, Object> unauthorizedUser(User loginUser, Integer alertgroupId) {
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
List<User> userList = userMapper.selectList(null);
List<User> resultUsers = new ArrayList<>();
Set<User> userSet = null;
if (userList != null && !userList.isEmpty()) {
userSet = new HashSet<>(userList);
List<User> authedUserList = userMapper.queryUserListByAlertGroupId(alertgroupId);
Set<User> authedUserSet = null;
if (authedUserList != null && !authedUserList.isEmpty()) {
authedUserSet = new HashSet<>(authedUserList);
userSet.removeAll(authedUserSet);
}
resultUsers = new ArrayList<>(userSet);
}
result.put(Constants.DATA_LIST, resultUsers);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query authorized users, i.e. users bound to the alert group
*
* @param loginUser login user
* @param alertgroupId alert group id
* @return authorized users result
*/
@Override
public Map<String, Object> authorizedUser(User loginUser, Integer alertgroupId) {
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
List<User> userList = userMapper.queryUserListByAlertGroupId(alertgroupId);
result.put(Constants.DATA_LIST, userList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* @param tenantId tenant id
* @return true if tenant exists, otherwise return false
*/
private boolean checkTenantExists(int tenantId) {
return tenantMapper.queryById(tenantId) != null;
}
/**
* @return if check failed return the field, otherwise return null
*/
private String checkUserParams(String userName, String password, String email, String phone) {
String msg = null;
if (!CheckUtils.checkUserName(userName)) {
msg = userName;
} else if (!CheckUtils.checkPassword(password)) {
msg = password;
} else if (!CheckUtils.checkEmail(email)) {
msg = email;
} else if (!CheckUtils.checkPhone(phone)) {
msg = phone;
}
return msg;
}
/**
* copy resource files
*
* @param resourceComponent resource component
* @param srcBasePath src base path
* @param dstBasePath dst base path
* @throws IOException io exception
*/
private void copyResourceFiles(ResourceComponent resourceComponent, String srcBasePath, String dstBasePath) throws IOException {
List<ResourceComponent> components = resourceComponent.getChildren();
if (CollectionUtils.isNotEmpty(components)) {
for (ResourceComponent component : components) {
// verify the source path exists
if (!HadoopUtils.getInstance().exists(String.format("%s/%s", srcBasePath, component.getFullName()))) {
logger.error("resource file: {} not exist,copy error", component.getFullName());
throw new ServiceException(Status.RESOURCE_NOT_EXIST);
}
if (!component.isDirctory()) {
// copy it to dst
HadoopUtils.getInstance().copy(String.format("%s/%s", srcBasePath, component.getFullName()), String.format("%s/%s", dstBasePath, component.getFullName()), false, true);
continue;
}
if (CollectionUtils.isEmpty(component.getChildren())) {
// create the destination directory if it does not exist
if (!HadoopUtils.getInstance().exists(String.format("%s/%s", dstBasePath, component.getFullName()))) {
HadoopUtils.getInstance().mkdir(String.format("%s/%s", dstBasePath, component.getFullName()));
}
} else {
copyResourceFiles(component, srcBasePath, dstBasePath);
}
}
}
}
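/*
* Walk sketch for copyResourceFiles (paths illustrative):
*   /old/resources
*     dirA/         -> recurse into its children
*       script.sh   -> copied to /new/resources/dirA/script.sh
*     emptyDir/     -> /new/resources/emptyDir is created only if missing
* A missing source path aborts the copy with RESOURCE_NOT_EXIST.
*/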
/**
* register user; default state is 0, default tenant_id is 1, no phone, no queue
*
* @param userName user name
* @param userPassword user password
* @param repeatPassword repeat password
* @param email email
* @return register result code
* @throws Exception exception
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> registerUser(String userName, String userPassword, String repeatPassword, String email) {
Map<String, Object> result = new HashMap<>();
//check user params
String msg = this.checkUserParams(userName, userPassword, email, "");
if (!StringUtils.isEmpty(msg)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, msg);
return result;
}
if (!userPassword.equals(repeatPassword)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "two passwords are not same");
return result;
}
User user = createUser(userName, userPassword, email, 1, "", "", Flag.NO.ordinal());
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, user);
return result;
}
/**
* activate user; only the system admin has permission; changes the user state from 0 to 1
*
* @param loginUser login user
* @param userName user name
* @return activation result
*/
@Override
public Map<String, Object> activateUser(User loginUser, String userName) {
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
if (!CheckUtils.checkUserName(userName)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName);
return result;
}
User user = userMapper.queryByUserNameAccurately(userName);
if (user == null) {
putMsg(result, Status.USER_NOT_EXIST, userName);
return result;
}
if (user.getState() != Flag.NO.ordinal()) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName);
return result;
}
user.setState(Flag.YES.ordinal());
Date now = new Date();
user.setUpdateTime(now);
userMapper.updateById(user);
User responseUser = userMapper.queryByUserNameAccurately(userName);
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, responseUser);
return result;
}
/**
* batch activate users; only the system admin has permission; changes user state from 0 to 1
*
* @param loginUser login user
* @param userNames user names
* @return batch activation result
*/
@Override
public Map<String, Object> batchActivateUser(User loginUser, List<String> userNames) {
Map<String, Object> result = new HashMap<>();
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
int totalSuccess = 0;
List<String> successUserNames = new ArrayList<>();
Map<String, Object> successRes = new HashMap<>();
int totalFailed = 0;
List<Map<String, String>> failedInfo = new ArrayList<>();
Map<String, Object> failedRes = new HashMap<>();
for (String userName : userNames) {
Map<String, Object> tmpResult = activateUser(loginUser, userName);
if (tmpResult.get(Constants.STATUS) != Status.SUCCESS) {
totalFailed++;
Map<String, String> failedBody = new HashMap<>();
failedBody.put("userName", userName);
Status status = (Status) tmpResult.get(Constants.STATUS);
String errorMessage = MessageFormat.format(status.getMsg(), userName);
failedBody.put("msg", errorMessage);
failedInfo.add(failedBody);
} else {
totalSuccess++;
successUserNames.add(userName);
}
}
successRes.put("sum", totalSuccess);
successRes.put("userName", successUserNames);
failedRes.put("sum", totalFailed);
failedRes.put("info", failedInfo);
Map<String, Object> res = new HashMap<>();
res.put("success", successRes);
res.put("failed", failedRes);
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, res);
return result;
}
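/*
* Result-shape sketch for batchActivateUser (keys as built above, values illustrative):
*   DATA_LIST = {
*     "success": { "sum": 2, "userName": ["alice", "bob"] },
*     "failed":  { "sum": 1, "info": [ { "userName": "ghost", "msg": "..." } ] }
*   }
*/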
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 6,820 | [Feature][API] Ordinary users can also share resource data source projects with others | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Ordinary users can also share resource data source projects with others
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/6820 | https://github.com/apache/dolphinscheduler/pull/7929 | 0c353d69e29a0b562ebb6938d345fc1b4589b781 | 82d04f1924692db83461b8c7800f2ab7228825c1 | "2021-11-11T16:24:04Z" | java | "2022-01-15T01:08:03Z" | dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.DataSourceServiceImpl;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper;
import org.apache.dolphinscheduler.plugin.datasource.api.datasource.hive.HiveDataSourceParamDTO;
import org.apache.dolphinscheduler.plugin.datasource.api.datasource.mysql.MySQLDataSourceParamDTO;
import org.apache.dolphinscheduler.plugin.datasource.api.datasource.oracle.OracleDataSourceParamDTO;
import org.apache.dolphinscheduler.plugin.datasource.api.datasource.postgresql.PostgreSQLDataSourceParamDTO;
import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.CommonUtils;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils;
import org.apache.dolphinscheduler.spi.datasource.ConnectionParam;
import org.apache.dolphinscheduler.spi.enums.DbConnectType;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.utils.PropertyUtils;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
/**
* data source service test
*/
@RunWith(PowerMockRunner.class)
@PowerMockIgnore({"sun.security.*", "javax.net.*"})
@PrepareForTest({DataSourceUtils.class, CommonUtils.class, DataSourceClientProvider.class, PasswordUtils.class})
public class DataSourceServiceTest {
@InjectMocks
private DataSourceServiceImpl dataSourceService;
@Mock
private DataSourceMapper dataSourceMapper;
@Mock
private DataSourceUserMapper datasourceUserMapper;
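// note: createDataSourceTest and updateDataSourceTest below carry no @Test annotation, so the test runner skips them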
public void createDataSourceTest() {
User loginUser = getAdminUser();
String dataSourceName = "dataSource01";
String dataSourceDesc = "test dataSource";
PostgreSQLDataSourceParamDTO postgreSqlDatasourceParam = new PostgreSQLDataSourceParamDTO();
postgreSqlDatasourceParam.setDatabase(dataSourceName);
postgreSqlDatasourceParam.setNote(dataSourceDesc);
postgreSqlDatasourceParam.setHost("172.16.133.200");
postgreSqlDatasourceParam.setPort(5432);
postgreSqlDatasourceParam.setDatabase("dolphinscheduler");
postgreSqlDatasourceParam.setUserName("postgres");
postgreSqlDatasourceParam.setPassword("");
// data source name already exists
List<DataSource> dataSourceList = new ArrayList<>();
DataSource dataSource = new DataSource();
dataSource.setName(dataSourceName);
dataSourceList.add(dataSource);
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(dataSourceList);
Result dataSourceExitsResult = dataSourceService.createDataSource(loginUser, postgreSqlDatasourceParam);
Assert.assertEquals(Status.DATASOURCE_EXIST.getCode(), dataSourceExitsResult.getCode().intValue());
ConnectionParam connectionParam = DataSourceUtils.buildConnectionParams(postgreSqlDatasourceParam);
DbType dataSourceType = postgreSqlDatasourceParam.getType();
// data source connection failed
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(null);
Result connectionResult = new Result(Status.DATASOURCE_CONNECT_FAILED.getCode(), Status.DATASOURCE_CONNECT_FAILED.getMsg());
//PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(connectionResult);
PowerMockito.doReturn(connectionResult).when(dataSourceService).checkConnection(dataSourceType, connectionParam);
Result connectFailedResult = dataSourceService.createDataSource(loginUser, postgreSqlDatasourceParam);
Assert.assertEquals(Status.DATASOURCE_CONNECT_FAILED.getCode(), connectFailedResult.getCode().intValue());
// request params not valid
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(null);
connectionResult = new Result(Status.SUCCESS.getCode(), Status.SUCCESS.getMsg());
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, connectionParam)).thenReturn(connectionResult);
Result notValidError = dataSourceService.createDataSource(loginUser, postgreSqlDatasourceParam);
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getCode(), notValidError.getCode().intValue());
// success
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(null);
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, connectionParam)).thenReturn(connectionResult);
Result success = dataSourceService.createDataSource(loginUser, postgreSqlDatasourceParam);
Assert.assertEquals(Status.SUCCESS.getCode(), success.getCode().intValue());
}
public void updateDataSourceTest() {
User loginUser = getAdminUser();
int dataSourceId = 12;
String dataSourceName = "dataSource01";
String dataSourceDesc = "test dataSource";
PostgreSQLDataSourceParamDTO postgreSqlDatasourceParam = new PostgreSQLDataSourceParamDTO();
postgreSqlDatasourceParam.setDatabase(dataSourceName);
postgreSqlDatasourceParam.setNote(dataSourceDesc);
postgreSqlDatasourceParam.setHost("172.16.133.200");
postgreSqlDatasourceParam.setPort(5432);
postgreSqlDatasourceParam.setDatabase("dolphinscheduler");
postgreSqlDatasourceParam.setUserName("postgres");
postgreSqlDatasourceParam.setPassword("");
// data source does not exist
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(null);
Result resourceNotExits = dataSourceService.updateDataSource(dataSourceId, loginUser, postgreSqlDatasourceParam);
Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getCode(), resourceNotExits.getCode().intValue());
// user no operation perm
DataSource dataSource = new DataSource();
dataSource.setUserId(0);
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
Result userNoOperationPerm = dataSourceService.updateDataSource(dataSourceId, loginUser, postgreSqlDatasourceParam);
Assert.assertEquals(Status.USER_NO_OPERATION_PERM.getCode(), userNoOperationPerm.getCode().intValue());
// data source name exits
dataSource.setUserId(-1);
List<DataSource> dataSourceList = new ArrayList<>();
dataSourceList.add(dataSource);
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(dataSourceList);
Result dataSourceNameExist = dataSourceService.updateDataSource(dataSourceId, loginUser, postgreSqlDatasourceParam);
Assert.assertEquals(Status.DATASOURCE_EXIST.getCode(), dataSourceNameExist.getCode().intValue());
// data source connect failed
DbType dataSourceType = postgreSqlDatasourceParam.getType();
ConnectionParam connectionParam = DataSourceUtils.buildConnectionParams(postgreSqlDatasourceParam);
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(null);
Result connectionResult = new Result(Status.SUCCESS.getCode(), Status.SUCCESS.getMsg());
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, connectionParam)).thenReturn(connectionResult);
Result connectFailed = dataSourceService.updateDataSource(dataSourceId, loginUser, postgreSqlDatasourceParam);
Assert.assertEquals(Status.DATASOURCE_CONNECT_FAILED.getCode(), connectFailed.getCode().intValue());
//success
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(null);
connectionResult = new Result(Status.DATASOURCE_CONNECT_FAILED.getCode(), Status.DATASOURCE_CONNECT_FAILED.getMsg());
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, connectionParam)).thenReturn(connectionResult);
Result success = dataSourceService.updateDataSource(dataSourceId, loginUser, postgreSqlDatasourceParam);
Assert.assertEquals(Status.SUCCESS.getCode(), success.getCode().intValue());
}
@Test
public void queryDataSourceListPagingTest() {
User loginUser = getAdminUser();
String searchVal = "";
int pageNo = 1;
int pageSize = 10;
Result result = dataSourceService.queryDataSourceListPaging(loginUser, searchVal, pageNo, pageSize);
Assert.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode());
}
@Test
public void connectionTest() {
int dataSourceId = -1;
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(null);
Result result = dataSourceService.connectionTest(dataSourceId);
Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getCode(), result.getCode().intValue());
}
@Test
public void deleteTest() {
User loginUser = getAdminUser();
int dataSourceId = 1;
Result result = new Result();
//resource not exist
dataSourceService.putMsg(result, Status.RESOURCE_NOT_EXIST);
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(null);
Assert.assertEquals(result.getCode(), dataSourceService.delete(loginUser, dataSourceId).getCode());
// user no operation perm
dataSourceService.putMsg(result, Status.USER_NO_OPERATION_PERM);
DataSource dataSource = new DataSource();
dataSource.setUserId(0);
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
Assert.assertEquals(result.getCode(), dataSourceService.delete(loginUser, dataSourceId).getCode());
// success
dataSourceService.putMsg(result, Status.SUCCESS);
dataSource.setUserId(-1);
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
Assert.assertEquals(result.getCode(), dataSourceService.delete(loginUser, dataSourceId).getCode());
}
@Test
public void unauthDatasourceTest() {
User loginUser = getAdminUser();
int userId = -1;
//user no operation perm
Map<String, Object> noOperationPerm = dataSourceService.unauthDatasource(loginUser, userId);
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, noOperationPerm.get(Constants.STATUS));
//success
loginUser.setUserType(UserType.ADMIN_USER);
Map<String, Object> success = dataSourceService.unauthDatasource(loginUser, userId);
Assert.assertEquals(Status.SUCCESS, success.get(Constants.STATUS));
}
@Test
public void authedDatasourceTest() {
User loginUser = getAdminUser();
int userId = -1;
//user no operation perm
Map<String, Object> noOperationPerm = dataSourceService.authedDatasource(loginUser, userId);
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, noOperationPerm.get(Constants.STATUS));
//success
loginUser.setUserType(UserType.ADMIN_USER);
Map<String, Object> success = dataSourceService.authedDatasource(loginUser, userId);
Assert.assertEquals(Status.SUCCESS, success.get(Constants.STATUS));
}
@Test
public void queryDataSourceListTest() {
User loginUser = new User();
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> map = dataSourceService.queryDataSourceList(loginUser, DbType.MYSQL.ordinal());
Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS));
}
@Test
public void verifyDataSourceNameTest() {
User loginUser = new User();
loginUser.setUserType(UserType.GENERAL_USER);
String dataSourceName = "dataSource1";
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(getDataSourceList());
Result result = dataSourceService.verifyDataSourceName(dataSourceName);
Assert.assertEquals(Status.DATASOURCE_EXIST.getMsg(), result.getMsg());
}
@Test
public void queryDataSourceTest() {
PowerMockito.when(dataSourceMapper.selectById(Mockito.anyInt())).thenReturn(null);
Map<String, Object> result = dataSourceService.queryDataSource(Mockito.anyInt());
Assert.assertEquals(((Status) result.get(Constants.STATUS)).getCode(), Status.RESOURCE_NOT_EXIST.getCode());
PowerMockito.when(dataSourceMapper.selectById(Mockito.anyInt())).thenReturn(getOracleDataSource());
result = dataSourceService.queryDataSource(Mockito.anyInt());
Assert.assertEquals(((Status) result.get(Constants.STATUS)).getCode(), Status.SUCCESS.getCode());
}
private List<DataSource> getDataSourceList() {
List<DataSource> dataSources = new ArrayList<>();
dataSources.add(getOracleDataSource());
return dataSources;
}
private DataSource getOracleDataSource() {
DataSource dataSource = new DataSource();
dataSource.setName("test");
dataSource.setNote("Note");
dataSource.setType(DbType.ORACLE);
dataSource.setConnectionParams("{\"connectType\":\"ORACLE_SID\",\"address\":\"jdbc:oracle:thin:@192.168.xx.xx:49161\",\"database\":\"XE\","
+ "\"jdbcUrl\":\"jdbc:oracle:thin:@192.168.xx.xx:49161/XE\",\"user\":\"system\",\"password\":\"oracle\"}");
return dataSource;
}
@Test
public void buildParameter() {
OracleDataSourceParamDTO oracleDatasourceParamDTO = new OracleDataSourceParamDTO();
oracleDatasourceParamDTO.setHost("192.168.9.1");
oracleDatasourceParamDTO.setPort(1521);
oracleDatasourceParamDTO.setDatabase("im");
oracleDatasourceParamDTO.setUserName("test");
oracleDatasourceParamDTO.setPassword("test");
oracleDatasourceParamDTO.setConnectType(DbConnectType.ORACLE_SERVICE_NAME);
ConnectionParam connectionParam = DataSourceUtils.buildConnectionParams(oracleDatasourceParamDTO);
String expected = "{\"user\":\"test\",\"password\":\"test\",\"address\":\"jdbc:oracle:thin:@//192.168.9.1:1521\",\"database\":\"im\",\"jdbcUrl\":\"jdbc:oracle:thin:@//192.168.9.1:1521/im\","
+ "\"driverClassName\":\"oracle.jdbc.OracleDriver\",\"validationQuery\":\"select 1 from dual\",\"connectType\":\"ORACLE_SERVICE_NAME\"}";
Assert.assertEquals(expected, JSONUtils.toJsonString(connectionParam));
PowerMockito.mockStatic(CommonUtils.class);
PowerMockito.mockStatic(PasswordUtils.class);
PowerMockito.when(CommonUtils.getKerberosStartupState()).thenReturn(true);
PowerMockito.when(PasswordUtils.encodePassword(Mockito.anyString())).thenReturn("test");
HiveDataSourceParamDTO hiveDataSourceParamDTO = new HiveDataSourceParamDTO();
hiveDataSourceParamDTO.setHost("192.168.9.1");
hiveDataSourceParamDTO.setPort(10000);
hiveDataSourceParamDTO.setDatabase("im");
hiveDataSourceParamDTO.setPrincipal("hive/hdfs-mycluster@ESZ.COM");
hiveDataSourceParamDTO.setUserName("test");
hiveDataSourceParamDTO.setPassword("test");
hiveDataSourceParamDTO.setJavaSecurityKrb5Conf("/opt/krb5.conf");
hiveDataSourceParamDTO.setLoginUserKeytabPath("/opt/hdfs.headless.keytab");
hiveDataSourceParamDTO.setLoginUserKeytabUsername("test2/hdfs-mycluster@ESZ.COM");
connectionParam = DataSourceUtils.buildConnectionParams(hiveDataSourceParamDTO);
expected = "{\"user\":\"test\",\"password\":\"test\",\"address\":\"jdbc:hive2://192.168.9.1:10000\",\"database\":\"im\",\"jdbcUrl\":\"jdbc:hive2://192.168.9.1:10000/im\",\"driverClassName\":\"org.apache.hive.jdbc.HiveDriver\",\"validationQuery\":\"select 1\",\"principal\":\"hive/hdfs-mycluster@ESZ.COM\",\"javaSecurityKrb5Conf\":\"/opt/krb5.conf\",\"loginUserKeytabUsername\":\"test2/hdfs-mycluster@ESZ.COM\",\"loginUserKeytabPath\":\"/opt/hdfs.headless.keytab\"}";
Assert.assertEquals(expected, JSONUtils.toJsonString(connectionParam));
}
@Test
public void buildParameterWithDecodePassword() {
PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE, "true");
Map<String, String> other = new HashMap<>();
other.put("autoDeserialize", "yes");
other.put("allowUrlInLocalInfile", "true");
MySQLDataSourceParamDTO mysqlDatasourceParamDTO = new MySQLDataSourceParamDTO();
mysqlDatasourceParamDTO.setHost("192.168.9.1");
mysqlDatasourceParamDTO.setPort(1521);
mysqlDatasourceParamDTO.setDatabase("im");
mysqlDatasourceParamDTO.setUserName("test");
mysqlDatasourceParamDTO.setPassword("123456");
mysqlDatasourceParamDTO.setOther(other);
ConnectionParam connectionParam = DataSourceUtils.buildConnectionParams(mysqlDatasourceParamDTO);
String expected = "{\"user\":\"test\",\"password\":\"IUAjJCVeJipNVEl6TkRVMg==\",\"address\":\"jdbc:mysql://192.168.9.1:1521\",\"database\":\"im\",\"jdbcUrl\":\"jdbc:mysql://192.168.9.1:1521/"
+ "im\",\"driverClassName\":\"com.mysql.cj.jdbc.Driver\",\"validationQuery\":\"select 1\",\"props\":{\"autoDeserialize\":\"yes\",\"allowUrlInLocalInfile\":\"true\"}}";
Assert.assertEquals(expected, JSONUtils.toJsonString(connectionParam));
PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE, "false");
mysqlDatasourceParamDTO = new MySQLDataSourceParamDTO();
mysqlDatasourceParamDTO.setHost("192.168.9.1");
mysqlDatasourceParamDTO.setPort(1521);
mysqlDatasourceParamDTO.setDatabase("im");
mysqlDatasourceParamDTO.setUserName("test");
mysqlDatasourceParamDTO.setPassword("123456");
connectionParam = DataSourceUtils.buildConnectionParams(mysqlDatasourceParamDTO);
expected = "{\"user\":\"test\",\"password\":\"123456\",\"address\":\"jdbc:mysql://192.168.9.1:1521\",\"database\":\"im\","
+ "\"jdbcUrl\":\"jdbc:mysql://192.168.9.1:1521/im\",\"driverClassName\":\"com.mysql.cj.jdbc.Driver\",\"validationQuery\":\"select 1\"}";
Assert.assertEquals(expected, JSONUtils.toJsonString(connectionParam));
}
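/*
* Behavior sketch: with Constants.DATASOURCE_ENCRYPTION_ENABLE set to "true",
* the stored password is encoded (hence the base64-looking value asserted above);
* with "false" the plain password is kept. The flag is read via PropertyUtils.
*/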
/**
* get a mock login user; the type is GENERAL_USER despite the method name,
* so permission-denied branches can be exercised before switching to admin
*
* @return mock login user
*/
private User getAdminUser() {
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserName("admin");
loginUser.setUserType(UserType.GENERAL_USER);
return loginUser;
}
/**
* test check connection
*/
@Test
public void testCheckConnection() throws Exception {
DbType dataSourceType = DbType.POSTGRESQL;
String dataSourceName = "dataSource01";
String dataSourceDesc = "test dataSource";
PostgreSQLDataSourceParamDTO postgreSqlDatasourceParam = new PostgreSQLDataSourceParamDTO();
postgreSqlDatasourceParam.setDatabase(dataSourceName);
postgreSqlDatasourceParam.setNote(dataSourceDesc);
postgreSqlDatasourceParam.setHost("172.16.133.200");
postgreSqlDatasourceParam.setPort(5432);
postgreSqlDatasourceParam.setDatabase("dolphinscheduler");
postgreSqlDatasourceParam.setUserName("postgres");
postgreSqlDatasourceParam.setPassword("");
ConnectionParam connectionParam = DataSourceUtils.buildConnectionParams(postgreSqlDatasourceParam);
PowerMockito.mockStatic(DataSourceUtils.class);
PowerMockito.mockStatic(DataSourceClientProvider.class);
DataSourceClientProvider clientProvider = PowerMockito.mock(DataSourceClientProvider.class);
PowerMockito.when(DataSourceClientProvider.getInstance()).thenReturn(clientProvider);
Result result = dataSourceService.checkConnection(dataSourceType, connectionParam);
Assert.assertEquals(Status.CONNECTION_TEST_FAILURE.getCode(), result.getCode().intValue());
Connection connection = PowerMockito.mock(Connection.class);
PowerMockito.when(clientProvider.getConnection(Mockito.any(), Mockito.any())).thenReturn(connection);
result = dataSourceService.checkConnection(dataSourceType, connectionParam);
Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
}
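/*
* Stubbing sketch: while DataSourceClientProvider.getConnection(...) is unstubbed
* it returns null, so checkConnection reports CONNECTION_TEST_FAILURE; once it
* returns a mock Connection the same call reports SUCCESS.
*/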
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 6,820 | [Feature][API] Ordinary users can also share resource data source projects with others | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Ordinary users can also share resource data source projects with others
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/6820 | https://github.com/apache/dolphinscheduler/pull/7929 | 0c353d69e29a0b562ebb6938d345fc1b4589b781 | 82d04f1924692db83461b8c7800f2ab7228825c1 | "2021-11-11T16:24:04Z" | java | "2022-01-15T01:08:03Z" | dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.ProjectUser;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.apache.commons.collections.CollectionUtils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* project service test
**/
@RunWith(MockitoJUnitRunner.class)
public class ProjectServiceTest {
private static final Logger logger = LoggerFactory.getLogger(ProjectServiceTest.class);
@InjectMocks
private ProjectServiceImpl projectService;
@Mock
private ProjectMapper projectMapper;
@Mock
private ProjectUserMapper projectUserMapper;
@Mock
private ProcessDefinitionMapper processDefinitionMapper;
@Mock
private UserMapper userMapper;
private String projectName = "ProjectServiceTest";
private String userName = "ProjectServiceTest";
@Test
public void testCreateProject() {
User loginUser = getLoginUser();
loginUser.setId(1);
Map<String, Object> result = projectService.createProject(loginUser, projectName, getDesc());
logger.info(result.toString());
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
//project name exist
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject());
result = projectService.createProject(loginUser, projectName, projectName);
logger.info(result.toString());
Assert.assertEquals(Status.PROJECT_ALREADY_EXISTS, result.get(Constants.STATUS));
//success
Mockito.when(projectMapper.insert(Mockito.any(Project.class))).thenReturn(1);
result = projectService.createProject(loginUser, "test", "test");
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testCheckProjectAndAuth() {
long projectCode = 1L;
Mockito.when(projectUserMapper.queryProjectRelation(1, 1)).thenReturn(getProjectUser());
User loginUser = getLoginUser();
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, null, projectCode);
logger.info(result.toString());
Assert.assertEquals(Status.PROJECT_NOT_EXIST, result.get(Constants.STATUS));
Project project = getProject();
//USER_NO_OPERATION_PROJECT_PERM
project.setUserId(2);
result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM, result.get(Constants.STATUS));
//success
project.setUserId(1);
result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
Map<String, Object> result2 = new HashMap<>();
result2 = projectService.checkProjectAndAuth(loginUser, null, projectCode);
Assert.assertEquals(Status.PROJECT_NOT_EXIST, result2.get(Constants.STATUS));
Project project1 = getProject();
// USER_NO_OPERATION_PROJECT_PERM
project1.setUserId(2);
result2 = projectService.checkProjectAndAuth(loginUser, project1, projectCode);
Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM, result2.get(Constants.STATUS));
//success
project1.setUserId(1);
projectService.checkProjectAndAuth(loginUser, project1, projectCode);
}
@Test
public void testHasProjectAndPerm() {
// Mockito.when(projectUserMapper.queryProjectRelation(1, 1)).thenReturn(getProjectUser());
User loginUser = getLoginUser();
Project project = getProject();
Map<String, Object> result = new HashMap<>();
// not exist user
User tempUser = new User();
tempUser.setId(Integer.MAX_VALUE);
boolean checkResult = projectService.hasProjectAndPerm(tempUser, project, result);
logger.info(result.toString());
Assert.assertFalse(checkResult);
//success
result = new HashMap<>();
project.setUserId(1);
checkResult = projectService.hasProjectAndPerm(loginUser, project, result);
logger.info(result.toString());
Assert.assertTrue(checkResult);
}
@Test
public void testQueryProjectListPaging() {
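// a single-page result holding one mocked project record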
IPage<Project> page = new Page<>(1, 10);
page.setRecords(getList());
page.setTotal(1L);
Mockito.when(projectMapper.queryProjectListPaging(Mockito.any(Page.class), Mockito.eq(1), Mockito.eq(projectName))).thenReturn(page);
User loginUser = getLoginUser();
// project owner
Result result = projectService.queryProjectListPaging(loginUser, 10, 1, projectName);
logger.info(result.toString());
PageInfo<Project> pageInfo = (PageInfo<Project>) result.getData();
Assert.assertTrue(CollectionUtils.isNotEmpty(pageInfo.getTotalList()));
//admin
Mockito.when(projectMapper.queryProjectListPaging(Mockito.any(Page.class), Mockito.eq(0), Mockito.eq(projectName))).thenReturn(page);
loginUser.setUserType(UserType.ADMIN_USER);
result = projectService.queryProjectListPaging(loginUser, 10, 1, projectName);
logger.info(result.toString());
pageInfo = (PageInfo<Project>) result.getData();
Assert.assertTrue(CollectionUtils.isNotEmpty(pageInfo.getTotalList()));
}
@Test
public void testDeleteProject() {
User loginUser = getLoginUser();
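// only the project with code 1L exists; any other code resolves to null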
Mockito.when(projectMapper.queryByCode(1L)).thenReturn(getProject());
//PROJECT_NOT_EXIST
Map<String, Object> result = projectService.deleteProject(loginUser, 11L);
logger.info(result.toString());
Assert.assertEquals(Status.PROJECT_NOT_EXIST, result.get(Constants.STATUS));
loginUser.setId(2);
//USER_NO_OPERATION_PROJECT_PERM
result = projectService.deleteProject(loginUser, 1L);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM, result.get(Constants.STATUS));
//DELETE_PROJECT_ERROR_DEFINES_NOT_NULL
Mockito.when(processDefinitionMapper.queryAllDefinitionList(1L)).thenReturn(getProcessDefinitions());
loginUser.setUserType(UserType.ADMIN_USER);
result = projectService.deleteProject(loginUser, 1L);
logger.info(result.toString());
Assert.assertEquals(Status.DELETE_PROJECT_ERROR_DEFINES_NOT_NULL, result.get(Constants.STATUS));
//success
Mockito.when(projectMapper.deleteById(1)).thenReturn(1);
Mockito.when(processDefinitionMapper.queryAllDefinitionList(1L)).thenReturn(new ArrayList<>());
result = projectService.deleteProject(loginUser, 1L);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testUpdate() {
User loginUser = getLoginUser();
Project project = getProject();
project.setCode(2L);
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(project);
Mockito.when(projectMapper.queryByCode(2L)).thenReturn(getProject());
// PROJECT_NOT_FOUND
Map<String, Object> result = projectService.update(loginUser, 1L, projectName, "desc", "testUser");
logger.info(result.toString());
Assert.assertEquals(Status.PROJECT_NOT_FOUND, result.get(Constants.STATUS));
//PROJECT_ALREADY_EXISTS
result = projectService.update(loginUser, 2L, projectName, "desc", userName);
logger.info(result.toString());
Assert.assertEquals(Status.PROJECT_ALREADY_EXISTS, result.get(Constants.STATUS));
Mockito.when(userMapper.queryByUserNameAccurately(Mockito.any())).thenReturn(null);
result = projectService.update(loginUser, 2L, "test", "desc", "testuser");
Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
//success
Mockito.when(userMapper.queryByUserNameAccurately(Mockito.any())).thenReturn(new User());
project.setUserId(1);
Mockito.when(projectMapper.updateById(Mockito.any(Project.class))).thenReturn(1);
result = projectService.update(loginUser, 2L, "test", "desc", "testUser");
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testQueryAuthorizedProject() {
User loginUser = getLoginUser();
Mockito.when(projectMapper.queryAuthedProjectListByUserId(1)).thenReturn(getList());
//USER_NO_OPERATION_PERM
Map<String, Object> result = projectService.queryAuthorizedProject(loginUser, 3);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//success
loginUser.setUserType(UserType.ADMIN_USER);
result = projectService.queryAuthorizedProject(loginUser, 1);
logger.info(result.toString());
List<Project> projects = (List<Project>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(projects));
loginUser.setUserType(UserType.GENERAL_USER);
result = projectService.queryAuthorizedProject(loginUser, loginUser.getId());
projects = (List<Project>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(projects));
}
@Test
public void testQueryAuthorizedUser() {
final User loginUser = this.getLoginUser();
// Failure 1: PROJECT_NOT_FOUND
Map<String, Object> result = this.projectService.queryAuthorizedUser(loginUser, 3682329499136L);
logger.info("FAILURE 1: {}", result.toString());
Assert.assertEquals(Status.PROJECT_NOT_FOUND, result.get(Constants.STATUS));
// Failure 2: USER_NO_OPERATION_PROJECT_PERM
loginUser.setId(100);
Mockito.when(this.projectMapper.queryByCode(Mockito.anyLong())).thenReturn(this.getProject());
result = this.projectService.queryAuthorizedUser(loginUser, 3682329499136L);
logger.info("FAILURE 2: {}", result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM, result.get(Constants.STATUS));
// SUCCESS
loginUser.setUserType(UserType.ADMIN_USER);
Mockito.when(this.userMapper.queryAuthedUserListByProjectId(1)).thenReturn(this.getUserList());
result = this.projectService.queryAuthorizedUser(loginUser, 3682329499136L);
logger.info("SUCCESS 1: {}", result.toString());
List<User> users = (List<User>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(users));
loginUser.setId(1);
loginUser.setUserType(UserType.GENERAL_USER);
result = this.projectService.queryAuthorizedUser(loginUser, 3682329499136L);
logger.info("SUCCESS 2: {}", result.toString());
users = (List<User>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(users));
}
@Test
public void testQueryCreatedProject() {
User loginUser = getLoginUser();
Mockito.when(projectMapper.queryProjectCreatedByUser(1)).thenReturn(getList());
//success
loginUser.setUserType(UserType.ADMIN_USER);
Map<String, Object> result = projectService.queryProjectCreatedByUser(loginUser);
logger.info(result.toString());
List<Project> projects = (List<Project>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(projects));
}
@Test
public void testQueryProjectCreatedAndAuthorizedByUser() {
User loginUser = getLoginUser();
// not admin user
Mockito.when(projectMapper.queryProjectCreatedAndAuthorizedByUserId(1)).thenReturn(getList());
Map<String, Object> result = projectService.queryProjectCreatedAndAuthorizedByUser(loginUser);
List<Project> notAdminUserResult = (List<Project>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(notAdminUserResult));
//admin user
loginUser.setUserType(UserType.ADMIN_USER);
Mockito.when(projectMapper.selectList(null)).thenReturn(getList());
result = projectService.queryProjectCreatedAndAuthorizedByUser(loginUser);
List<Project> projects = (List<Project>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(projects));
}
@Test
public void testQueryAllProjectList() {
Mockito.when(projectMapper.queryAllProject()).thenReturn(getList());
Map<String, Object> result = projectService.queryAllProjectList();
logger.info(result.toString());
List<Project> projects = (List<Project>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(projects));
}
@Test
public void testQueryUnauthorizedProject() {
Mockito.when(projectMapper.queryProjectExceptUserId(2)).thenReturn(getList());
User loginUser = new User();
loginUser.setUserType(UserType.ADMIN_USER);
Map<String, Object> result = projectService.queryUnauthorizedProject(loginUser, 2);
logger.info(result.toString());
List<Project> projects = (List<Project>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(projects));
}
private Project getProject() {
Project project = new Project();
project.setCode(1L);
project.setId(1);
project.setName(projectName);
project.setUserId(1);
return project;
}
private List<Project> getList() {
List<Project> list = new ArrayList<>();
list.add(getProject());
return list;
}
/**
* create admin user
*/
private User getLoginUser() {
User loginUser = new User();
loginUser.setUserType(UserType.GENERAL_USER);
loginUser.setUserName(userName);
loginUser.setId(1);
return loginUser;
}
/**
* Get general user
* @return a general-type user with test credentials
*/
private User getGeneralUser() {
User user = new User();
user.setUserType(UserType.GENERAL_USER);
user.setUserName("userTest0001");
user.setUserPassword("userTest0001");
return user;
}
/**
* Get user list
* @return a list containing one general user
*/
private List<User> getUserList() {
List<User> userList = new ArrayList<>();
userList.add(this.getGeneralUser());
return userList;
}
/**
* get project user
*/
private ProjectUser getProjectUser() {
ProjectUser projectUser = new ProjectUser();
projectUser.setProjectId(1);
projectUser.setUserId(1);
return projectUser;
}
private List<ProcessDefinition> getProcessDefinitions() {
List<ProcessDefinition> list = new ArrayList<>();
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setProjectCode(1L);
list.add(processDefinition);
return list;
}
private List<Integer> getProjectIds() {
return Collections.singletonList(1);
}
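/**
* an over-length description string, used to trigger REQUEST_PARAMS_NOT_VALID_ERROR in createProject
*/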
private String getDesc() {
return "projectUserMapper.deleteProjectRelation(projectId,userId)projectUserMappe"
+ ".deleteProjectRelation(projectId,userId)projectUserMappe"
+ "r.deleteProjectRelation(projectId,userId)projectUserMapper"
+ ".deleteProjectRelation(projectId,userId)projectUserMapper.deleteProjectRelation(projectId,userId)";
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 6,820 | [Feature][API] Ordinary users can also share resource data source projects with others | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Ordinary users can also share resource data source projects with others
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/6820 | https://github.com/apache/dolphinscheduler/pull/7929 | 0c353d69e29a0b562ebb6938d345fc1b4589b781 | 82d04f1924692db83461b8c7800f2ab7228825c1 | "2021-11-11T16:24:04Z" | java | "2022-01-15T01:08:03Z" | dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.FileUtils;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ResourceMapper;
import org.apache.dolphinscheduler.dao.mapper.ResourceUserMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.apache.dolphinscheduler.spi.enums.ResourceType;
import org.apache.commons.collections.CollectionUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.mock.web.MockMultipartFile;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.google.common.io.Files;
/**
* resources service test
*/
@RunWith(PowerMockRunner.class)
@PowerMockIgnore({"sun.security.*", "javax.net.*"})
@PrepareForTest({HadoopUtils.class, PropertyUtils.class,
FileUtils.class, org.apache.dolphinscheduler.api.utils.FileUtils.class,
Files.class})
public class ResourcesServiceTest {
private static final Logger logger = LoggerFactory.getLogger(ResourcesServiceTest.class);
@InjectMocks
private ResourcesServiceImpl resourcesService;
@Mock
private ResourceMapper resourcesMapper;
@Mock
private TenantMapper tenantMapper;
@Mock
private HadoopUtils hadoopUtils;
@Mock
private UserMapper userMapper;
@Mock
private UdfFuncMapper udfFunctionMapper;
@Mock
private ProcessDefinitionMapper processDefinitionMapper;
@Mock
private ResourceUserMapper resourceUserMapper;
@Before
public void setUp() {
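// mock all static utility classes so tests never touch a real HDFS or local filesystem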
PowerMockito.mockStatic(HadoopUtils.class);
PowerMockito.mockStatic(FileUtils.class);
PowerMockito.mockStatic(Files.class);
PowerMockito.mockStatic(org.apache.dolphinscheduler.api.utils.FileUtils.class);
try {
// new HadoopUtils
PowerMockito.whenNew(HadoopUtils.class).withNoArguments().thenReturn(hadoopUtils);
} catch (Exception e) {
e.printStackTrace();
}
PowerMockito.when(HadoopUtils.getInstance()).thenReturn(hadoopUtils);
PowerMockito.mockStatic(PropertyUtils.class);
}
@Test
public void testCreateResource() {
PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false);
User user = new User();
//HDFS_NOT_STARTUP
Result result = resourcesService.createResource(user, "ResourcesServiceTest", "ResourcesServiceTest", ResourceType.FILE, null, -1, "/");
logger.info(result.toString());
Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(), result.getMsg());
//RESOURCE_FILE_IS_EMPTY
MockMultipartFile mockMultipartFile = new MockMultipartFile("test.pdf", "".getBytes());
PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true);
result = resourcesService.createResource(user, "ResourcesServiceTest", "ResourcesServiceTest", ResourceType.FILE, mockMultipartFile, -1, "/");
logger.info(result.toString());
Assert.assertEquals(Status.RESOURCE_FILE_IS_EMPTY.getMsg(), result.getMsg());
//RESOURCE_SUFFIX_FORBID_CHANGE
mockMultipartFile = new MockMultipartFile("test.pdf", "test.pdf", "pdf", "test".getBytes());
PowerMockito.when(Files.getFileExtension("test.pdf")).thenReturn("pdf");
PowerMockito.when(Files.getFileExtension("ResourcesServiceTest.jar")).thenReturn("jar");
result = resourcesService.createResource(user, "ResourcesServiceTest.jar", "ResourcesServiceTest", ResourceType.FILE, mockMultipartFile, -1, "/");
logger.info(result.toString());
Assert.assertEquals(Status.RESOURCE_SUFFIX_FORBID_CHANGE.getMsg(), result.getMsg());
//UDF_RESOURCE_SUFFIX_NOT_JAR
mockMultipartFile = new MockMultipartFile("ResourcesServiceTest.pdf", "ResourcesServiceTest.pdf", "pdf", "test".getBytes());
PowerMockito.when(Files.getFileExtension("ResourcesServiceTest.pdf")).thenReturn("pdf");
result = resourcesService.createResource(user, "ResourcesServiceTest.pdf", "ResourcesServiceTest", ResourceType.UDF, mockMultipartFile, -1, "/");
logger.info(result.toString());
Assert.assertEquals(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg(), result.getMsg());
}
@Test
public void testCreateDirectory() {
PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false);
User user = new User();
//HDFS_NOT_STARTUP
Result result = resourcesService.createDirectory(user, "directoryTest", "directory test", ResourceType.FILE, -1, "/");
logger.info(result.toString());
Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(), result.getMsg());
//PARENT_RESOURCE_NOT_EXIST
user.setId(1);
user.setTenantId(1);
Mockito.when(userMapper.selectById(1)).thenReturn(getUser());
Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true);
Mockito.when(resourcesMapper.selectById(Mockito.anyInt())).thenReturn(null);
result = resourcesService.createDirectory(user, "directoryTest", "directory test", ResourceType.FILE, 1, "/");
logger.info(result.toString());
Assert.assertEquals(Status.PARENT_RESOURCE_NOT_EXIST.getMsg(), result.getMsg());
//RESOURCE_EXIST
PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true);
Mockito.when(resourcesMapper.existResource("/directoryTest", 0)).thenReturn(true);
result = resourcesService.createDirectory(user, "directoryTest", "directory test", ResourceType.FILE, -1, "/");
logger.info(result.toString());
Assert.assertEquals(Status.RESOURCE_EXIST.getMsg(), result.getMsg());
}
@Test
public void testUpdateResource() {
PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false);
User user = new User();
//HDFS_NOT_STARTUP
Result result = resourcesService.updateResource(user, 1, "ResourcesServiceTest", "ResourcesServiceTest", ResourceType.FILE, null);
logger.info(result.toString());
Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(), result.getMsg());
//RESOURCE_NOT_EXIST
Mockito.when(resourcesMapper.selectById(1)).thenReturn(getResource());
PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true);
result = resourcesService.updateResource(user, 0, "ResourcesServiceTest", "ResourcesServiceTest", ResourceType.FILE, null);
logger.info(result.toString());
Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), result.getMsg());
//USER_NO_OPERATION_PERM
result = resourcesService.updateResource(user, 1, "ResourcesServiceTest", "ResourcesServiceTest", ResourceType.FILE, null);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM.getMsg(), result.getMsg());
//RESOURCE_NOT_EXIST
user.setId(1);
Mockito.when(userMapper.selectById(1)).thenReturn(getUser());
Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
PowerMockito.when(HadoopUtils.getHdfsFileName(Mockito.any(), Mockito.any(), Mockito.anyString())).thenReturn("test1");
try {
Mockito.when(HadoopUtils.getInstance().exists(Mockito.any())).thenReturn(false);
} catch (IOException e) {
logger.error(e.getMessage(), e);
}
result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest", ResourceType.UDF, null);
Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), result.getMsg());
//SUCCESS
user.setId(1);
Mockito.when(userMapper.queryDetailsById(1)).thenReturn(getUser());
Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
try {
Mockito.when(HadoopUtils.getInstance().exists(Mockito.any())).thenReturn(true);
} catch (IOException e) {
logger.error(e.getMessage(), e);
}
result = resourcesService.updateResource(user, 1, "ResourcesServiceTest.jar", "ResourcesServiceTest", ResourceType.FILE, null);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg());
//RESOURCE_EXIST
Mockito.when(resourcesMapper.existResource("/ResourcesServiceTest1.jar", 0)).thenReturn(true);
result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest", ResourceType.FILE, null);
logger.info(result.toString());
Assert.assertEquals(Status.RESOURCE_EXIST.getMsg(), result.getMsg());
//USER_NOT_EXIST
Mockito.when(userMapper.selectById(Mockito.anyInt())).thenReturn(null);
result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest", ResourceType.UDF, null);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NOT_EXIST.getCode(), (int) result.getCode());
//CURRENT_LOGIN_USER_TENANT_NOT_EXIST
Mockito.when(userMapper.selectById(1)).thenReturn(getUser());
Mockito.when(tenantMapper.queryById(Mockito.anyInt())).thenReturn(null);
result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest", ResourceType.UDF, null);
logger.info(result.toString());
Assert.assertEquals(Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST.getMsg(), result.getMsg());
//SUCCESS
Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
PowerMockito.when(HadoopUtils.getHdfsResourceFileName(Mockito.any(), Mockito.any())).thenReturn("test");
try {
PowerMockito.when(HadoopUtils.getInstance().copy(Mockito.anyString(), Mockito.anyString(), true, true)).thenReturn(true);
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest1.jar", ResourceType.UDF, null);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg());
}
@Test
public void testQueryResourceListPaging() {
User loginUser = new User();
loginUser.setUserType(UserType.ADMIN_USER);
IPage<Resource> resourcePage = new Page<>(1, 10);
resourcePage.setTotal(1);
resourcePage.setRecords(getResourceList());
Mockito.when(resourcesMapper.queryResourcePaging(Mockito.any(Page.class),
Mockito.eq(0), Mockito.eq(-1), Mockito.eq(0), Mockito.eq("test"), Mockito.any())).thenReturn(resourcePage);
Result result = resourcesService.queryResourceListPaging(loginUser, -1, ResourceType.FILE, "test", 1, 10);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode());
PageInfo pageInfo = (PageInfo) result.getData();
Assert.assertTrue(CollectionUtils.isNotEmpty(pageInfo.getTotalList()));
}
@Test
public void testQueryResourceList() {
User loginUser = new User();
loginUser.setId(0);
loginUser.setUserType(UserType.ADMIN_USER);
Mockito.when(resourcesMapper.queryResourceListAuthored(0, 0)).thenReturn(getResourceList());
Map<String, Object> result = resourcesService.queryResourceList(loginUser, ResourceType.FILE);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
List<Resource> resourceList = (List<Resource>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(resourceList));
}
@Test
public void testDelete() {
User loginUser = new User();
loginUser.setId(0);
PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false);
Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
try {
// HDFS_NOT_STARTUP
Result result = resourcesService.delete(loginUser, 1);
logger.info(result.toString());
Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(), result.getMsg());
//RESOURCE_NOT_EXIST
PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true);
Mockito.when(resourcesMapper.selectById(1)).thenReturn(getResource());
result = resourcesService.delete(loginUser, 2);
logger.info(result.toString());
Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), result.getMsg());
// USER_NO_OPERATION_PERM
result = resourcesService.delete(loginUser, 1);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM.getMsg(), result.getMsg());
//CURRENT_LOGIN_USER_TENANT_NOT_EXIST
loginUser.setUserType(UserType.ADMIN_USER);
loginUser.setTenantId(2);
Mockito.when(userMapper.selectById(Mockito.anyInt())).thenReturn(loginUser);
result = resourcesService.delete(loginUser, 1);
logger.info(result.toString());
Assert.assertEquals(Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST.getMsg(), result.getMsg());
//SUCCESS
loginUser.setTenantId(1);
Mockito.when(hadoopUtils.delete(Mockito.anyString(), Mockito.anyBoolean())).thenReturn(true);
Mockito.when(processDefinitionMapper.listResources()).thenReturn(getResources());
Mockito.when(resourcesMapper.deleteIds(Mockito.any())).thenReturn(1);
Mockito.when(resourceUserMapper.deleteResourceUserArray(Mockito.anyInt(), Mockito.any())).thenReturn(1);
result = resourcesService.delete(loginUser, 1);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg());
} catch (Exception e) {
logger.error("delete error", e);
Assert.fail("delete error: " + e.getMessage());
}
}
@Test
public void testVerifyResourceName() {
User user = new User();
user.setId(1);
Mockito.when(resourcesMapper.existResource("/ResourcesServiceTest.jar", 0)).thenReturn(true);
Result result = resourcesService.verifyResourceName("/ResourcesServiceTest.jar", ResourceType.FILE, user);
logger.info(result.toString());
Assert.assertEquals(Status.RESOURCE_EXIST.getMsg(), result.getMsg());
//CURRENT_LOGIN_USER_TENANT_NOT_EXIST
Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
String unExistFullName = "/test.jar";
try {
Mockito.when(hadoopUtils.exists(unExistFullName)).thenReturn(false);
} catch (IOException e) {
logger.error("hadoop error", e);
}
result = resourcesService.verifyResourceName("/test.jar", ResourceType.FILE, user);
logger.info(result.toString());
Assert.assertEquals(Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST.getMsg(), result.getMsg());
//RESOURCE_FILE_EXIST
user.setTenantId(1);
try {
Mockito.when(hadoopUtils.exists("test")).thenReturn(true);
} catch (IOException e) {
logger.error("hadoop error", e);
}
PowerMockito.when(HadoopUtils.getHdfsResourceFileName("123", "test1")).thenReturn("test");
result = resourcesService.verifyResourceName("/ResourcesServiceTest.jar", ResourceType.FILE, user);
logger.info(result.toString());
Assert.assertEquals(Status.RESOURCE_EXIST.getCode(), (int) result.getCode());
//SUCCESS
result = resourcesService.verifyResourceName("test2", ResourceType.FILE, user);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg());
}
@Test
public void testReadResource() {
PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false);
//HDFS_NOT_STARTUP
Result result = resourcesService.readResource(1, 1, 10);
logger.info(result.toString());
Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(), result.getMsg());
//RESOURCE_NOT_EXIST
Mockito.when(resourcesMapper.selectById(1)).thenReturn(getResource());
PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true);
result = resourcesService.readResource(2, 1, 10);
logger.info(result.toString());
Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), result.getMsg());
//RESOURCE_SUFFIX_NOT_SUPPORT_VIEW
PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("class");
PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true);
result = resourcesService.readResource(1, 1, 10);
logger.info(result.toString());
Assert.assertEquals(Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW.getMsg(), result.getMsg());
//USER_NOT_EXIST
PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("jar");
PowerMockito.when(Files.getFileExtension("ResourcesServiceTest.jar")).thenReturn("jar");
result = resourcesService.readResource(1, 1, 10);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NOT_EXIST.getCode(), (int) result.getCode());
//CURRENT_LOGIN_USER_TENANT_NOT_EXIST
Mockito.when(userMapper.selectById(1)).thenReturn(getUser());
result = resourcesService.readResource(1, 1, 10);
logger.info(result.toString());
Assert.assertEquals(Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST.getMsg(), result.getMsg());
//RESOURCE_FILE_NOT_EXIST
Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
try {
Mockito.when(hadoopUtils.exists(Mockito.anyString())).thenReturn(false);
} catch (IOException e) {
logger.error("hadoop error", e);
}
result = resourcesService.readResource(1, 1, 10);
logger.info(result.toString());
Assert.assertEquals(Status.RESOURCE_FILE_NOT_EXIST.getCode(), (int) result.getCode());
//SUCCESS
try {
Mockito.when(hadoopUtils.exists(null)).thenReturn(true);
Mockito.when(hadoopUtils.catFile(null, 1, 10)).thenReturn(getContent());
} catch (IOException e) {
logger.error("hadoop error", e);
}
result = resourcesService.readResource(1, 1, 10);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg());
}
@Test
public void testOnlineCreateResource() {
PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false);
PowerMockito.when(HadoopUtils.getHdfsResDir("hdfsdDir")).thenReturn("hdfsDir");
PowerMockito.when(HadoopUtils.getHdfsUdfDir("udfDir")).thenReturn("udfDir");
User user = getUser();
//HDFS_NOT_STARTUP
Result result = resourcesService.onlineCreateResource(user, ResourceType.FILE, "test", "jar", "desc", "content", -1, "/");
logger.info(result.toString());
Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(), result.getMsg());
//RESOURCE_SUFFIX_NOT_SUPPORT_VIEW
PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true);
PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("class");
result = resourcesService.onlineCreateResource(user, ResourceType.FILE, "test", "jar", "desc", "content", -1, "/");
logger.info(result.toString());
Assert.assertEquals(Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW.getMsg(), result.getMsg());
//RuntimeException
try {
PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("jar");
Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
result = resourcesService.onlineCreateResource(user, ResourceType.FILE, "test", "jar", "desc", "content", -1, "/");
} catch (RuntimeException ex) {
logger.info(result.toString());
Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), ex.getMessage());
}
//SUCCESS
Mockito.when(FileUtils.getUploadFilename(Mockito.anyString(), Mockito.anyString())).thenReturn("test");
PowerMockito.when(FileUtils.writeContent2File(Mockito.anyString(), Mockito.anyString())).thenReturn(true);
result = resourcesService.onlineCreateResource(user, ResourceType.FILE, "test", "jar", "desc", "content", -1, "/");
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg());
}
@Test
public void testUpdateResourceContent() {
PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false);
// HDFS_NOT_STARTUP
Result result = resourcesService.updateResourceContent(1, "content");
logger.info(result.toString());
Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(), result.getMsg());
//RESOURCE_NOT_EXIST
PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true);
Mockito.when(resourcesMapper.selectById(1)).thenReturn(getResource());
result = resourcesService.updateResourceContent(2, "content");
logger.info(result.toString());
Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), result.getMsg());
//RESOURCE_SUFFIX_NOT_SUPPORT_VIEW
PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true);
PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("class");
result = resourcesService.updateResourceContent(1, "content");
logger.info(result.toString());
Assert.assertEquals(Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW.getMsg(), result.getMsg());
//USER_NOT_EXIST
PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("jar");
PowerMockito.when(Files.getFileExtension("ResourcesServiceTest.jar")).thenReturn("jar");
result = resourcesService.updateResourceContent(1, "content");
logger.info(result.toString());
Assert.assertEquals(Status.USER_NOT_EXIST.getCode(), (int) result.getCode());
//CURRENT_LOGIN_USER_TENANT_NOT_EXIST
Mockito.when(userMapper.selectById(1)).thenReturn(getUser());
result = resourcesService.updateResourceContent(1, "content");
logger.info(result.toString());
Assert.assertEquals(Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST.getCode(), (int) result.getCode());
//SUCCESS
Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
Mockito.when(FileUtils.getUploadFilename(Mockito.anyString(), Mockito.anyString())).thenReturn("test");
PowerMockito.when(FileUtils.writeContent2File(Mockito.anyString(), Mockito.anyString())).thenReturn(true);
result = resourcesService.updateResourceContent(1, "content");
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg());
}
@Test
public void testDownloadResource() {
PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true);
Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
Mockito.when(userMapper.selectById(1)).thenReturn(getUser());
org.springframework.core.io.Resource resourceMock = Mockito.mock(org.springframework.core.io.Resource.class);
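// a Spring Resource stub stands in for the file returned by downloadResource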
try {
//resource null
org.springframework.core.io.Resource resource = resourcesService.downloadResource(1);
Assert.assertNull(resource);
Mockito.when(resourcesMapper.selectById(1)).thenReturn(getResource());
PowerMockito.when(org.apache.dolphinscheduler.api.utils.FileUtils.file2Resource(Mockito.any())).thenReturn(resourceMock);
resource = resourcesService.downloadResource(1);
Assert.assertNotNull(resource);
} catch (Exception e) {
logger.error("DownloadResource error", e);
Assert.fail("DownloadResource error: " + e.getMessage());
}
}
@Test
public void testUnauthorizedFile() {
User user = getUser();
//USER_NO_OPERATION_PERM
Map<String, Object> result = resourcesService.unauthorizedFile(user, 1);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//SUCCESS
user.setUserType(UserType.ADMIN_USER);
Mockito.when(resourcesMapper.queryResourceExceptUserId(1)).thenReturn(getResourceList());
result = resourcesService.unauthorizedFile(user, 1);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
List<Resource> resources = (List<Resource>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(resources));
}
@Test
public void testUnauthorizedUDFFunction() {
User user = getUser();
//USER_NO_OPERATION_PERM
Map<String, Object> result = resourcesService.unauthorizedUDFFunction(user, 1);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//SUCCESS
user.setUserType(UserType.ADMIN_USER);
Mockito.when(udfFunctionMapper.queryUdfFuncExceptUserId(1)).thenReturn(getUdfFuncList());
result = resourcesService.unauthorizedUDFFunction(user, 1);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
List<UdfFunc> udfFuncs = (List<UdfFunc>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(udfFuncs));
}
@Test
public void testAuthorizedUDFFunction() {
User user = getUser();
//USER_NO_OPERATION_PERM
Map<String, Object> result = resourcesService.authorizedUDFFunction(user, 1);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//SUCCESS
user.setUserType(UserType.ADMIN_USER);
Mockito.when(udfFunctionMapper.queryAuthedUdfFunc(1)).thenReturn(getUdfFuncList());
result = resourcesService.authorizedUDFFunction(user, 1);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
List<UdfFunc> udfFuncs = (List<UdfFunc>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(udfFuncs));
}
@Test
public void testAuthorizedFile() {
User user = getUser();
//USER_NO_OPERATION_PERM
Map<String, Object> result = resourcesService.authorizedFile(user, 1);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//SUCCESS
user.setUserType(UserType.ADMIN_USER);
List<Integer> resIds = new ArrayList<>();
resIds.add(1);
Mockito.when(resourceUserMapper.queryResourcesIdListByUserIdAndPerm(Mockito.anyInt(), Mockito.anyInt())).thenReturn(resIds);
Mockito.when(resourcesMapper.queryResourceListById(Mockito.any())).thenReturn(getResourceList());
result = resourcesService.authorizedFile(user, 1);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
List<Resource> resources = (List<Resource>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(resources));
}
@Test
public void testCatFile() {
PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false);
//SUCCESS
try {
Mockito.when(hadoopUtils.exists(null)).thenReturn(true);
Mockito.when(hadoopUtils.catFile(null, 1, 10)).thenReturn(getContent());
List<String> list = hadoopUtils.catFile(null, 1, 10);
Assert.assertNotNull(list);
} catch (IOException e) {
logger.error("hadoop error", e);
}
}
private List<Resource> getResourceList() {
List<Resource> resources = new ArrayList<>();
resources.add(getResource());
return resources;
}
private Tenant getTenant() {
Tenant tenant = new Tenant();
tenant.setTenantCode("123");
return tenant;
}
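/**
* a FILE-type resource owned by user 1, located at the root path
*/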
private Resource getResource() {
Resource resource = new Resource();
resource.setPid(-1);
resource.setUserId(1);
resource.setDescription("ResourcesServiceTest.jar");
resource.setAlias("ResourcesServiceTest.jar");
resource.setFullName("/ResourcesServiceTest.jar");
resource.setType(ResourceType.FILE);
return resource;
}
private Resource getUdfResource() {
Resource resource = new Resource();
resource.setUserId(1);
resource.setDescription("udfTest");
resource.setAlias("udfTest.jar");
resource.setFullName("/udfTest.jar");
resource.setType(ResourceType.UDF);
return resource;
}
private UdfFunc getUdfFunc() {
UdfFunc udfFunc = new UdfFunc();
udfFunc.setId(1);
return udfFunc;
}
private List<UdfFunc> getUdfFuncList() {
List<UdfFunc> udfFuncs = new ArrayList<>();
udfFuncs.add(getUdfFunc());
return udfFuncs;
}
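/**
* a user bound to tenant 1
*/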
private User getUser() {
User user = new User();
user.setId(1);
user.setTenantId(1);
return user;
}
private List<String> getContent() {
List<String> contentList = new ArrayList<>();
contentList.add("test");
return contentList;
}
private List<Map<String, Object>> getResources() {
List<Map<String, Object>> resources = new ArrayList<>();
Map<String, Object> resource = new HashMap<>();
resource.put("id", 1);
resource.put("resource_ids", "1");
resources.add(resource);
return resources;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 6,820 | [Feature][API] Ordinary users can also share resource data source projects with others | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Ordinary users can also share resource data source projects with others
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/6820 | https://github.com/apache/dolphinscheduler/pull/7929 | 0c353d69e29a0b562ebb6938d345fc1b4589b781 | 82d04f1924692db83461b8c7800f2ab7228825c1 | "2021-11-11T16:24:04Z" | java | "2022-01-15T01:08:03Z" | dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.UsersServiceImpl;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.EncryptionUtils;
import org.apache.dolphinscheduler.dao.entity.AlertGroup;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.AccessTokenMapper;
import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper;
import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper;
import org.apache.dolphinscheduler.dao.mapper.ResourceMapper;
import org.apache.dolphinscheduler.dao.mapper.ResourceUserMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.apache.dolphinscheduler.spi.enums.ResourceType;
import org.apache.commons.collections.CollectionUtils;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.google.common.collect.Lists;
/**
* users service test
*/
@RunWith(MockitoJUnitRunner.Silent.class)
public class UsersServiceTest {
private static final Logger logger = LoggerFactory.getLogger(UsersServiceTest.class);
@InjectMocks
private UsersServiceImpl usersService;
@Mock
private UserMapper userMapper;
@Mock
private AccessTokenMapper accessTokenMapper;
@Mock
private TenantMapper tenantMapper;
@Mock
private ResourceMapper resourceMapper;
@Mock
private AlertGroupMapper alertGroupMapper;
@Mock
private DataSourceUserMapper datasourceUserMapper;
@Mock
private ProjectUserMapper projectUserMapper;
@Mock
private ResourceUserMapper resourceUserMapper;
@Mock
private UDFUserMapper udfUserMapper;
@Mock
private ProjectMapper projectMapper;
private String queueName = "UsersServiceTestQueue";
@Before
public void before() {
}
@After
public void after() {
}
@Test
public void testCreateUserForLdap() {
String userName = "user1";
String email = "user1@ldap.com";
User user = usersService.createUser(UserType.ADMIN_USER, userName, email);
Assert.assertNotNull(user);
}
@Test
public void testCreateUser() {
User user = new User();
user.setUserType(UserType.ADMIN_USER);
String userName = "userTest0001~";
String userPassword = "userTest";
String email = "123@qq.com";
int tenantId = Integer.MAX_VALUE;
String phone = "13456432345";
int state = 1;
try {
//userName error
Map<String, Object> result = usersService.createUser(user, userName, userPassword, email, tenantId, phone, queueName, state);
logger.info(result.toString());
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
userName = "userTest0001";
userPassword = "userTest000111111111111111";
//password error
result = usersService.createUser(user, userName, userPassword, email, tenantId, phone, queueName, state);
logger.info(result.toString());
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
userPassword = "userTest0001";
email = "1q.com";
//email error
result = usersService.createUser(user, userName, userPassword, email, tenantId, phone, queueName, state);
logger.info(result.toString());
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
email = "122222@qq.com";
phone = "2233";
//phone error
result = usersService.createUser(user, userName, userPassword, email, tenantId, phone, queueName, state);
logger.info(result.toString());
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
phone = "13456432345";
//tenantId not exists
result = usersService.createUser(user, userName, userPassword, email, tenantId, phone, queueName, state);
logger.info(result.toString());
Assert.assertEquals(Status.TENANT_NOT_EXIST, result.get(Constants.STATUS));
//success
Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
result = usersService.createUser(user, userName, userPassword, email, 1, phone, queueName, state);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
} catch (Exception e) {
logger.error(Status.CREATE_USER_ERROR.getMsg(), e);
Assert.fail(Status.CREATE_USER_ERROR.getMsg());
}
}
@Test
public void testQueryUser() {
String userName = "userTest0001";
String userPassword = "userTest0001";
when(userMapper.queryUserByNamePassword(userName, EncryptionUtils.getMd5(userPassword))).thenReturn(getGeneralUser());
User queryUser = usersService.queryUser(userName, userPassword);
Assert.assertNotNull(queryUser);
logger.info(queryUser.toString());
}
@Test
public void testSelectByIds() {
List<Integer> ids = new ArrayList<>();
List<User> users = usersService.queryUser(ids);
Assert.assertTrue(users.isEmpty());
ids.add(1);
List<User> userList = new ArrayList<>();
userList.add(new User());
when(userMapper.selectByIds(ids)).thenReturn(userList);
List<User> userList1 = usersService.queryUser(ids);
Assert.assertFalse(userList1.isEmpty());
}
@Test
public void testGetUserIdByName() {
User user = new User();
user.setId(1);
user.setUserType(UserType.ADMIN_USER);
user.setUserName("test_user");
//user name null
int userId = usersService.getUserIdByName("");
Assert.assertEquals(0, userId);
//user not exist
when(usersService.queryUser(user.getUserName())).thenReturn(null);
int userNotExistId = usersService.getUserIdByName(user.getUserName());
Assert.assertEquals(-1, userNotExistId);
//user exist
when(usersService.queryUser(user.getUserName())).thenReturn(user);
int userExistId = usersService.getUserIdByName(user.getUserName());
Assert.assertEquals(user.getId(), userExistId);
}
@Test
public void testQueryUserList() {
User user = new User();
//no operate
Map<String, Object> result = usersService.queryUserList(user);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//success
user.setUserType(UserType.ADMIN_USER);
when(userMapper.selectList(null)).thenReturn(getUserList());
result = usersService.queryUserList(user);
List<User> userList = (List<User>) result.get(Constants.DATA_LIST);
Assert.assertTrue(userList.size() > 0);
}
@Test
public void testQueryUserListPage() {
User user = new User();
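// one page of mocked users is returned for the "userTest" keyword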
IPage<User> page = new Page<>(1, 10);
page.setRecords(getUserList());
when(userMapper.queryUserPaging(any(Page.class), eq("userTest"))).thenReturn(page);
//no operate
Result result = usersService.queryUserList(user, "userTest", 1, 10);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM.getCode(), (int) result.getCode());
//success
user.setUserType(UserType.ADMIN_USER);
result = usersService.queryUserList(user, "userTest", 1, 10);
Assert.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode());
PageInfo<User> pageInfo = (PageInfo<User>) result.getData();
Assert.assertTrue(pageInfo.getTotalList().size() > 0);
}
@Test
public void testUpdateUser() {
String userName = "userTest0001";
String userPassword = "userTest0001";
try {
//user not exist
Map<String, Object> result = usersService.updateUser(getLoginUser(), 0, userName, userPassword, "3443@qq.com", 1, "13457864543", "queue", 1);
Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
logger.info(result.toString());
//success
when(userMapper.selectById(1)).thenReturn(getUser());
result = usersService.updateUser(getLoginUser(), 1, userName, userPassword, "32222s@qq.com", 1, "13457864543", "queue", 1);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
} catch (Exception e) {
logger.error("update user error", e);
Assert.fail("update user error: " + e.getMessage());
}
}
@Test
public void testDeleteUserById() {
User loginUser = new User();
try {
when(userMapper.queryTenantCodeByUserId(1)).thenReturn(getUser());
when(userMapper.selectById(1)).thenReturn(getUser());
when(accessTokenMapper.deleteAccessTokenByUserId(1)).thenReturn(0);
//no operate
Map<String, Object> result = usersService.deleteUserById(loginUser, 3);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
// user not exist
loginUser.setUserType(UserType.ADMIN_USER);
result = usersService.deleteUserById(loginUser, 3);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
// user is project owner
Mockito.when(projectMapper.queryProjectCreatedByUser(1)).thenReturn(Lists.newArrayList(new Project()));
result = usersService.deleteUserById(loginUser, 1);
Assert.assertEquals(Status.TRANSFORM_PROJECT_OWNERSHIP, result.get(Constants.STATUS));
//success
Mockito.when(projectMapper.queryProjectCreatedByUser(1)).thenReturn(null);
result = usersService.deleteUserById(loginUser, 1);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
} catch (Exception e) {
logger.error("delete user error", e);
Assert.fail("delete user error: " + e.getMessage());
}
}
@Test
public void testGrantProject() {
when(userMapper.selectById(1)).thenReturn(getUser());
User loginUser = new User();
String projectIds = "100000,120000";
Map<String, Object> result = usersService.grantProject(loginUser, 1, projectIds);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//user not exist
loginUser.setUserType(UserType.ADMIN_USER);
result = usersService.grantProject(loginUser, 2, projectIds);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
//success
result = usersService.grantProject(loginUser, 1, projectIds);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testGrantProjectByCode() {
// Mock Project, User
final long projectCode = 1L;
final int projectCreator = 1;
final int authorizer = 100;
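// both the authorizer and the project creator resolve to the same mocked user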
Mockito.when(this.userMapper.selectById(authorizer)).thenReturn(this.getUser());
Mockito.when(this.userMapper.selectById(projectCreator)).thenReturn(this.getUser());
Mockito.when(this.projectMapper.queryByCode(projectCode)).thenReturn(this.getProject());
// ERROR: USER_NOT_EXIST
User loginUser = new User();
Map<String, Object> result = this.usersService.grantProjectByCode(loginUser, 999, projectCode);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
// ERROR: PROJECT_NOT_FOUND
result = this.usersService.grantProjectByCode(loginUser, authorizer, 999);
logger.info(result.toString());
Assert.assertEquals(Status.PROJECT_NOT_FOUND, result.get(Constants.STATUS));
// ERROR: USER_NO_OPERATION_PERM
loginUser.setId(999);
loginUser.setUserType(UserType.GENERAL_USER);
result = this.usersService.grantProjectByCode(loginUser, authorizer, projectCode);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
// SUCCESS: USER IS PROJECT OWNER
loginUser.setId(projectCreator);
loginUser.setUserType(UserType.GENERAL_USER);
result = this.usersService.grantProjectByCode(loginUser, authorizer, projectCode);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
// SUCCESS: USER IS ADMINISTRATOR
loginUser.setId(999);
loginUser.setUserType(UserType.ADMIN_USER);
result = this.usersService.grantProjectByCode(loginUser, authorizer, projectCode);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testRevokeProject() {
Mockito.when(this.userMapper.selectById(1)).thenReturn(this.getUser());
final long projectCode = 3682329499136L;
// user no permission
User loginUser = new User();
Map<String, Object> result = this.usersService.revokeProject(loginUser, 1, projectCode);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
// user not exist
loginUser.setUserType(UserType.ADMIN_USER);
result = this.usersService.revokeProject(loginUser, 2, projectCode);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
// success
Mockito.when(this.projectMapper.queryByCode(Mockito.anyLong())).thenReturn(new Project());
result = this.usersService.revokeProject(loginUser, 1, projectCode);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testGrantResources() {
String resourceIds = "100000,120000";
when(userMapper.selectById(1)).thenReturn(getUser());
User loginUser = new User();
Map<String, Object> result = usersService.grantResources(loginUser, 1, resourceIds);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//user not exist
loginUser.setUserType(UserType.ADMIN_USER);
result = usersService.grantResources(loginUser, 2, resourceIds);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
//success
when(resourceMapper.selectById(Mockito.anyInt())).thenReturn(getResource());
when(resourceUserMapper.deleteResourceUser(1, 0)).thenReturn(1);
result = usersService.grantResources(loginUser, 1, resourceIds);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testGrantUDFFunction() {
String udfIds = "100000,120000";
when(userMapper.selectById(1)).thenReturn(getUser());
User loginUser = new User();
Map<String, Object> result = usersService.grantUDFFunction(loginUser, 1, udfIds);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//user not exist
loginUser.setUserType(UserType.ADMIN_USER);
result = usersService.grantUDFFunction(loginUser, 2, udfIds);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
//success
when(udfUserMapper.deleteByUserId(1)).thenReturn(1);
result = usersService.grantUDFFunction(loginUser, 1, udfIds);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testGrantDataSource() {
String datasourceIds = "100000,120000";
when(userMapper.selectById(1)).thenReturn(getUser());
User loginUser = new User();
Map<String, Object> result = usersService.grantDataSource(loginUser, 1, datasourceIds);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//user not exist
loginUser.setUserType(UserType.ADMIN_USER);
result = usersService.grantDataSource(loginUser, 2, datasourceIds);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
//success
when(datasourceUserMapper.deleteByUserId(Mockito.anyInt())).thenReturn(1);
result = usersService.grantDataSource(loginUser, 1, datasourceIds);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
private User getLoginUser() {
User loginUser = new User();
loginUser.setId(1);
loginUser.setUserType(UserType.ADMIN_USER);
return loginUser;
}
@Test
public void getUserInfo() {
User loginUser = new User();
loginUser.setUserName("admin");
loginUser.setUserType(UserType.ADMIN_USER);
// get admin user
Map<String, Object> result = usersService.getUserInfo(loginUser);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
User tempUser = (User) result.get(Constants.DATA_LIST);
//check userName
Assert.assertEquals("admin", tempUser.getUserName());
//get general user
loginUser.setUserType(null);
loginUser.setId(1);
when(userMapper.queryDetailsById(1)).thenReturn(getGeneralUser());
when(alertGroupMapper.queryByUserId(1)).thenReturn(getAlertGroups());
result = usersService.getUserInfo(loginUser);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
tempUser = (User) result.get(Constants.DATA_LIST);
//check userName
Assert.assertEquals("userTest0001", tempUser.getUserName());
}
@Test
public void testQueryAllGeneralUsers() {
User loginUser = new User();
//no operation permission
Map<String, Object> result = usersService.queryAllGeneralUsers(loginUser);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//success
loginUser.setUserType(UserType.ADMIN_USER);
when(userMapper.queryAllGeneralUser()).thenReturn(getUserList());
result = usersService.queryAllGeneralUsers(loginUser);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
List<User> userList = (List<User>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(userList));
}
@Test
public void testVerifyUserName() {
//not exist user
Result result = usersService.verifyUserName("admin89899");
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg());
//exist user
when(userMapper.queryByUserNameAccurately("userTest0001")).thenReturn(getUser());
result = usersService.verifyUserName("userTest0001");
logger.info(result.toString());
Assert.assertEquals(Status.USER_NAME_EXIST.getMsg(), result.getMsg());
}
@Test
public void testUnauthorizedUser() {
User loginUser = new User();
when(userMapper.selectList(null)).thenReturn(getUserList());
when(userMapper.queryUserListByAlertGroupId(2)).thenReturn(getUserList());
//no operation permission
Map<String, Object> result = usersService.unauthorizedUser(loginUser, 2);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//success
loginUser.setUserType(UserType.ADMIN_USER);
result = usersService.unauthorizedUser(loginUser, 2);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testAuthorizedUser() {
User loginUser = new User();
when(userMapper.queryUserListByAlertGroupId(2)).thenReturn(getUserList());
//no operation permission
Map<String, Object> result = usersService.authorizedUser(loginUser, 2);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//success
loginUser.setUserType(UserType.ADMIN_USER);
result = usersService.authorizedUser(loginUser, 2);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
List<User> userList = (List<User>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(userList));
}
@Test
public void testRegisterUser() {
String userName = "userTest0002~";
String userPassword = "userTest";
String repeatPassword = "userTest";
String email = "123@qq.com";
try {
//userName error
Map<String, Object> result = usersService.registerUser(userName, userPassword, repeatPassword, email);
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
userName = "userTest0002";
userPassword = "userTest000111111111111111";
//password error
result = usersService.registerUser(userName, userPassword, repeatPassword, email);
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
userPassword = "userTest0002";
email = "1q.com";
//email error
result = usersService.registerUser(userName, userPassword, repeatPassword, email);
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
//repeatPassword error
email = "7400@qq.com";
repeatPassword = "userPassword";
result = usersService.registerUser(userName, userPassword, repeatPassword, email);
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
//success
repeatPassword = "userTest0002";
result = usersService.registerUser(userName, userPassword, repeatPassword, email);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
@Test
public void testActivateUser() {
User user = new User();
user.setUserType(UserType.GENERAL_USER);
String userName = "userTest0002~";
try {
//not admin
Map<String, Object> result = usersService.activateUser(user, userName);
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//userName error
user.setUserType(UserType.ADMIN_USER);
result = usersService.activateUser(user, userName);
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
//user not exist
userName = "userTest10013";
result = usersService.activateUser(user, userName);
Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
//user state error
userName = "userTest0001";
when(userMapper.queryByUserNameAccurately(userName)).thenReturn(getUser());
result = usersService.activateUser(user, userName);
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
//success
when(userMapper.queryByUserNameAccurately(userName)).thenReturn(getDisabledUser());
result = usersService.activateUser(user, userName);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
@Test
public void testBatchActivateUser() {
User user = new User();
user.setUserType(UserType.GENERAL_USER);
List<String> userNames = new ArrayList<>();
userNames.add("userTest0001");
userNames.add("userTest0002");
userNames.add("userTest0003~");
userNames.add("userTest0004");
try {
//not admin
Map<String, Object> result = usersService.batchActivateUser(user, userNames);
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//batch activate user names
user.setUserType(UserType.ADMIN_USER);
when(userMapper.queryByUserNameAccurately("userTest0001")).thenReturn(getUser());
when(userMapper.queryByUserNameAccurately("userTest0002")).thenReturn(getDisabledUser());
result = usersService.batchActivateUser(user, userNames);
Map<String, Object> responseData = (Map<String, Object>) result.get(Constants.DATA_LIST);
Map<String, Object> successData = (Map<String, Object>) responseData.get("success");
int totalSuccess = (Integer) successData.get("sum");
Map<String, Object> failedData = (Map<String, Object>) responseData.get("failed");
int totalFailed = (Integer) failedData.get("sum");
Assert.assertEquals(1, totalSuccess);
Assert.assertEquals(3, totalFailed);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
/**
* get disabled user
*/
private User getDisabledUser() {
User user = new User();
user.setUserType(UserType.GENERAL_USER);
user.setUserName("userTest0001");
user.setUserPassword("userTest0001");
user.setState(0);
return user;
}
/**
* get project
*
* @return project
*/
private Project getProject() {
Project project = new Project();
project.setId(1);
project.setCode(1L);
project.setUserId(1);
project.setName("PJ-001");
project.setPerm(7);
project.setDefCount(0);
project.setInstRunningCount(0);
return project;
}
/**
* get general user
*/
private User getGeneralUser() {
User user = new User();
user.setUserType(UserType.GENERAL_USER);
user.setUserName("userTest0001");
user.setUserPassword("userTest0001");
return user;
}
private List<User> getUserList() {
List<User> userList = new ArrayList<>();
userList.add(getGeneralUser());
return userList;
}
/**
* get user
*/
private User getUser() {
User user = new User();
user.setUserType(UserType.ADMIN_USER);
user.setUserName("userTest0001");
user.setUserPassword("userTest0001");
user.setState(1);
return user;
}
/**
* get tenant
*
* @return tenant
*/
private Tenant getTenant() {
Tenant tenant = new Tenant();
tenant.setId(1);
return tenant;
}
/**
* get resource
*
* @return resource
*/
private Resource getResource() {
Resource resource = new Resource();
resource.setPid(-1);
resource.setUserId(1);
resource.setDescription("ResourcesServiceTest.jar");
resource.setAlias("ResourcesServiceTest.jar");
resource.setFullName("/ResourcesServiceTest.jar");
resource.setType(ResourceType.FILE);
return resource;
}
private List<AlertGroup> getAlertGroups() {
List<AlertGroup> alertGroups = new ArrayList<>();
AlertGroup alertGroup = new AlertGroup();
alertGroups.add(alertGroup);
return alertGroups;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,346 | [Feature][Python] Add workflow as code task type spark | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add Python API task type Spark. Sub-task of #6407. We should cover all parameters from the UI side and make it suitable for Python.
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7346 | https://github.com/apache/dolphinscheduler/pull/7968 | f4a8502d436ef7c6beff3b5290d7daf93a092318 | 027af091c736a801df3082ea5323d6821a7d64dd | "2021-12-12T13:10:40Z" | java | "2022-01-15T15:24:55Z" | dolphinscheduler-python/pydolphinscheduler/examples/task_spark_example.py | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,346 | [Feature][Python] Add workflow as code task type spark | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add Python API task type Spark. Sub-task of #6407. We should cover all parameters from the UI side and make it suitable for Python.
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7346 | https://github.com/apache/dolphinscheduler/pull/7968 | f4a8502d436ef7c6beff3b5290d7daf93a092318 | 027af091c736a801df3082ea5323d6821a7d64dd | "2021-12-12T13:10:40Z" | java | "2022-01-15T15:24:55Z" | dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/constants.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Constants for pydolphinscheduler."""
class ProcessDefinitionReleaseState:
"""Constants for :class:`pydolphinscheduler.core.process_definition.ProcessDefinition` release state."""
ONLINE: str = "ONLINE"
OFFLINE: str = "OFFLINE"
class ProcessDefinitionDefault:
"""Constants default value for :class:`pydolphinscheduler.core.process_definition.ProcessDefinition`."""
PROJECT: str = "project-pydolphin"
TENANT: str = "tenant_pydolphin"
USER: str = "userPythonGateway"
# TODO for simplicity, the password is set to the same value as the username
USER_PWD: str = "userPythonGateway"
USER_EMAIL: str = "userPythonGateway@dolphinscheduler.com"
USER_PHONE: str = "11111111111"
USER_STATE: int = 1
QUEUE: str = "queuePythonGateway"
WORKER_GROUP: str = "default"
TIME_ZONE: str = "Asia/Shanghai"
class TaskPriority(str):
"""Constants for task priority."""
HIGHEST = "HIGHEST"
HIGH = "HIGH"
MEDIUM = "MEDIUM"
LOW = "LOW"
LOWEST = "LOWEST"
class TaskFlag(str):
"""Constants for task flag."""
YES = "YES"
NO = "NO"
class TaskTimeoutFlag(str):
"""Constants for task timeout flag."""
CLOSE = "CLOSE"
class TaskType(str):
"""Constants for task type, it will also show you which kind we support up to now."""
SHELL = "SHELL"
HTTP = "HTTP"
PYTHON = "PYTHON"
SQL = "SQL"
SUB_PROCESS = "SUB_PROCESS"
PROCEDURE = "PROCEDURE"
DATAX = "DATAX"
DEPENDENT = "DEPENDENT"
CONDITIONS = "CONDITIONS"
SWITCH = "SWITCH"
FLINK = "FLINK"
class DefaultTaskCodeNum(str):
"""Constants and default value for default task code number."""
DEFAULT = 1
class JavaGatewayDefault(str):
"""Constants and default value for java gateway."""
RESULT_MESSAGE_KEYWORD = "msg"
RESULT_MESSAGE_SUCCESS = "success"
RESULT_STATUS_KEYWORD = "status"
RESULT_STATUS_SUCCESS = "SUCCESS"
RESULT_DATA = "data"
class Delimiter(str):
"""Constants for delimiter."""
BAR = "-"
DASH = "/"
COLON = ":"
UNDERSCORE = "_"
DIRECTION = "->"
class Time(str):
"""Constants for date."""
FMT_STD_DATE = "%Y-%m-%d"
LEN_STD_DATE = 10
FMT_DASH_DATE = "%Y/%m/%d"
FMT_SHORT_DATE = "%Y%m%d"
LEN_SHORT_DATE = 8
FMT_STD_TIME = "%H:%M:%S"
FMT_NO_COLON_TIME = "%H%M%S"
|
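constants.py above is where pydolphinscheduler registers the supported task types, so the requested Spark feature would start with a new `TaskType` entry. A minimal sketch under that assumption; the `SPARK` value mirrors how `FLINK` is registered above and is not taken from the merged PR:

```python
# Hypothetical addition to pydolphinscheduler.constants; the SPARK
# value is an assumption that mirrors the existing FLINK constant.
class TaskType(str):
    """Constants for task type."""

    FLINK = "FLINK"
    SPARK = "SPARK"  # assumed new entry for the Spark task


# The string value is what ends up in the serialized task definition.
assert TaskType.SPARK == "SPARK"
```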
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,346 | [Feature][Python] Add workflow as code task type spark | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add Python API task type Spark. Sub-task of #6407. We should cover all parameters from the UI side and make it suitable for Python.
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7346 | https://github.com/apache/dolphinscheduler/pull/7968 | f4a8502d436ef7c6beff3b5290d7daf93a092318 | 027af091c736a801df3082ea5323d6821a7d64dd | "2021-12-12T13:10:40Z" | java | "2022-01-15T15:24:55Z" | dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/core/engine.py | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,346 | [Feature][Python] Add workflow as code task type spark | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add Python API task type Spark. Sub-task of #6407. We should cover all parameters from the UI side and make it suitable for Python.
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7346 | https://github.com/apache/dolphinscheduler/pull/7968 | f4a8502d436ef7c6beff3b5290d7daf93a092318 | 027af091c736a801df3082ea5323d6821a7d64dd | "2021-12-12T13:10:40Z" | java | "2022-01-15T15:24:55Z" | dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/tasks/flink.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Task Flink."""
from typing import Dict, Optional
from pydolphinscheduler.constants import TaskType
from pydolphinscheduler.core.task import Task
from pydolphinscheduler.java_gateway import launch_gateway
class ProgramType(str):
"""Type of program flink runs, for now it just contain `JAVA`, `SCALA` and `PYTHON`."""
JAVA = "JAVA"
SCALA = "SCALA"
PYTHON = "PYTHON"
class FlinkVersion(str):
"""Flink version, for now it just contain `HIGHT` and `LOW`."""
LOW_VERSION = "<1.10"
HIGHT_VERSION = ">=1.10"
class DeployMode(str):
"""Flink deploy mode, for now it just contain `LOCAL` and `CLUSTER`."""
LOCAL = "local"
CLUSTER = "cluster"
class Flink(Task):
"""Task flink object, declare behavior for flink task to dolphinscheduler."""
_task_custom_attr = {
"main_class",
"main_jar",
"deploy_mode",
"flink_version",
"slot",
"task_manager",
"job_manager_memory",
"task_manager_memory",
"app_name",
"program_type",
"parallelism",
"main_args",
"others",
}
def __init__(
self,
name: str,
main_class: str,
main_package: str,
program_type: Optional[ProgramType] = ProgramType.SCALA,
deploy_mode: Optional[DeployMode] = DeployMode.CLUSTER,
flink_version: Optional[FlinkVersion] = FlinkVersion.LOW_VERSION,
app_name: Optional[str] = None,
job_manager_memory: Optional[str] = "1G",
task_manager_memory: Optional[str] = "2G",
slot: Optional[int] = 1,
task_manager: Optional[int] = 2,
parallelism: Optional[int] = 1,
main_args: Optional[str] = None,
others: Optional[str] = None,
*args,
**kwargs
):
super().__init__(name, TaskType.FLINK, *args, **kwargs)
self.main_class = main_class
self.main_package = main_package
self.program_type = program_type
self.deploy_mode = deploy_mode
self.flink_version = flink_version
self.app_name = app_name
self.job_manager_memory = job_manager_memory
self.task_manager_memory = task_manager_memory
self.slot = slot
self.task_manager = task_manager
self.parallelism = parallelism
self.main_args = main_args
self.others = others
self._resource = {}
@property
def main_jar(self) -> Dict:
"""Return main package of dict."""
resource_info = self.get_resource_info(self.program_type, self.main_package)
return {"id": resource_info.get("id")}
def get_resource_info(self, program_type, main_package) -> Dict:
"""Get resource info from java gateway, contains resource id, name."""
if not self._resource:
self._resource = launch_gateway().entry_point.getResourcesFileInfo(
program_type,
main_package,
)
return self._resource
|
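flink.py above is the closest template for the requested Spark task: engine-style tasks list their custom attributes in `_task_custom_attr` and resolve the main jar through the Java gateway. Below is a minimal sketch of a Spark analogue, assuming the same `Task` base class and `launch_gateway` helper; the parameter set is a guess at the UI fields and is not the merged implementation.

```python
# Hypothetical tasks/spark.py, modeled on tasks/flink.py above.
from typing import Dict, Optional

from pydolphinscheduler.constants import TaskType
from pydolphinscheduler.core.task import Task
from pydolphinscheduler.java_gateway import launch_gateway


class Spark(Task):
    """Sketch of a Spark task; the attribute names here are assumptions."""

    _task_custom_attr = {
        "main_class",
        "main_jar",
        "deploy_mode",
        "program_type",
        "main_args",
        "others",
    }

    def __init__(
        self,
        name: str,
        main_class: str,
        main_package: str,
        program_type: Optional[str] = "SCALA",
        deploy_mode: Optional[str] = "cluster",
        main_args: Optional[str] = None,
        others: Optional[str] = None,
        *args,
        **kwargs,
    ):
        # TaskType.SPARK is assumed to exist once the constant is added.
        super().__init__(name, TaskType.SPARK, *args, **kwargs)
        self.main_class = main_class
        self.main_package = main_package
        self.program_type = program_type
        self.deploy_mode = deploy_mode
        self.main_args = main_args
        self.others = others
        self._resource = {}

    @property
    def main_jar(self) -> Dict:
        """Return the main package as a resource dict, as Flink does."""
        resource_info = self.get_resource_info(self.program_type, self.main_package)
        return {"id": resource_info.get("id")}

    def get_resource_info(self, program_type, main_package) -> Dict:
        """Fetch resource id and name from the Java gateway, cached once."""
        if not self._resource:
            self._resource = launch_gateway().entry_point.getResourcesFileInfo(
                program_type, main_package
            )
        return self._resource
```

The `getResourcesFileInfo` call is the same gateway entry point the Flink task uses, defined in PythonGatewayServer.java further below.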
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,346 | [Feature][Python] Add workflow as code task type spark | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add Python API task type Spark. Sub-task of #6407. We should cover all parameters from the UI side and make it suitable for Python.
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7346 | https://github.com/apache/dolphinscheduler/pull/7968 | f4a8502d436ef7c6beff3b5290d7daf93a092318 | 027af091c736a801df3082ea5323d6821a7d64dd | "2021-12-12T13:10:40Z" | java | "2022-01-15T15:24:55Z" | dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/tasks/spark.py | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,346 | [Feature][Python] Add workflow as code task type spark | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add Python API task type Spark. Sub-task of #6407. We should cover all parameters from the UI side and make it suitable for Python.
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7346 | https://github.com/apache/dolphinscheduler/pull/7968 | f4a8502d436ef7c6beff3b5290d7daf93a092318 | 027af091c736a801df3082ea5323d6821a7d64dd | "2021-12-12T13:10:40Z" | java | "2022-01-15T15:24:55Z" | dolphinscheduler-python/pydolphinscheduler/tests/core/test_engine.py | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,346 | [Feature][Python] Add workflow as code task type spark | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add Python API task type Spark. Sub-task of #6407. We should cover all parameters from the UI side and make it suitable for Python.
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7346 | https://github.com/apache/dolphinscheduler/pull/7968 | f4a8502d436ef7c6beff3b5290d7daf93a092318 | 027af091c736a801df3082ea5323d6821a7d64dd | "2021-12-12T13:10:40Z" | java | "2022-01-15T15:24:55Z" | dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_datax.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Test Task DataX."""
from unittest.mock import patch
import pytest
from pydolphinscheduler.tasks.datax import CustomDataX, DataX
@patch(
"pydolphinscheduler.core.database.Database.get_database_info",
return_value=({"id": 1, "type": "MYSQL"}),
)
def test_datax_get_define(mock_datasource):
"""Test task datax function get_define."""
code = 123
version = 1
name = "test_datax_get_define"
command = "select name from test_source_table_name"
datasource_name = "test_datasource"
datatarget_name = "test_datatarget"
target_table = "test_target_table_name"
expect = {
"code": code,
"name": name,
"version": 1,
"description": None,
"delayTime": 0,
"taskType": "DATAX",
"taskParams": {
"customConfig": 0,
"dsType": "MYSQL",
"dataSource": 1,
"dtType": "MYSQL",
"dataTarget": 1,
"sql": command,
"targetTable": target_table,
"jobSpeedByte": 0,
"jobSpeedRecord": 1000,
"xms": 1,
"xmx": 1,
"preStatements": [],
"postStatements": [],
"localParams": [],
"resourceList": [],
"dependence": {},
"conditionResult": {"successNode": [""], "failedNode": [""]},
"waitStartTimeout": {},
},
"flag": "YES",
"taskPriority": "MEDIUM",
"workerGroup": "default",
"failRetryTimes": 0,
"failRetryInterval": 1,
"timeoutFlag": "CLOSE",
"timeoutNotifyStrategy": None,
"timeout": 0,
}
with patch(
"pydolphinscheduler.core.task.Task.gen_code_and_version",
return_value=(code, version),
):
task = DataX(name, datasource_name, datatarget_name, command, target_table)
assert task.get_define() == expect
@pytest.mark.parametrize("json_template", ["json_template"])
def test_custom_datax_get_define(json_template):
"""Test task custom datax function get_define."""
code = 123
version = 1
name = "test_custom_datax_get_define"
expect = {
"code": code,
"name": name,
"version": 1,
"description": None,
"delayTime": 0,
"taskType": "DATAX",
"taskParams": {
"customConfig": 1,
"json": json_template,
"xms": 1,
"xmx": 1,
"localParams": [],
"resourceList": [],
"dependence": {},
"conditionResult": {"successNode": [""], "failedNode": [""]},
"waitStartTimeout": {},
},
"flag": "YES",
"taskPriority": "MEDIUM",
"workerGroup": "default",
"failRetryTimes": 0,
"failRetryInterval": 1,
"timeoutFlag": "CLOSE",
"timeoutNotifyStrategy": None,
"timeout": 0,
}
with patch(
"pydolphinscheduler.core.task.Task.gen_code_and_version",
return_value=(code, version),
):
task = CustomDataX(name, json_template)
assert task.get_define() == expect
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,346 | [Feature][Python] Add workflow as code task type spark | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add Python API task type Spark. Sub-task of #6407. We should cover all parameters from the UI side and make it suitable for Python.
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7346 | https://github.com/apache/dolphinscheduler/pull/7968 | f4a8502d436ef7c6beff3b5290d7daf93a092318 | 027af091c736a801df3082ea5323d6821a7d64dd | "2021-12-12T13:10:40Z" | java | "2022-01-15T15:24:55Z" | dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_flink.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Test Task Flink."""
from unittest.mock import patch
from pydolphinscheduler.tasks.flink import DeployMode, Flink, FlinkVersion, ProgramType
@patch(
"pydolphinscheduler.tasks.flink.Flink.get_resource_info",
return_value=({"id": 1, "name": "test"}),
)
def test_flink_get_define(mock_resource):
"""Test task flink function get_define."""
code = 123
version = 1
name = "test_flink_get_define"
main_class = "org.apache.flink.test_main_class"
main_package = "test_main_package"
program_type = ProgramType.JAVA
deploy_mode = DeployMode.LOCAL
expect = {
"code": code,
"name": name,
"version": 1,
"description": None,
"delayTime": 0,
"taskType": "FLINK",
"taskParams": {
"mainClass": main_class,
"mainJar": {
"id": 1,
},
"programType": program_type,
"deployMode": deploy_mode,
"flinkVersion": FlinkVersion.LOW_VERSION,
"slot": 1,
"parallelism": 1,
"taskManager": 2,
"jobManagerMemory": "1G",
"taskManagerMemory": "2G",
"appName": None,
"mainArgs": None,
"others": None,
"localParams": [],
"resourceList": [],
"dependence": {},
"conditionResult": {"successNode": [""], "failedNode": [""]},
"waitStartTimeout": {},
},
"flag": "YES",
"taskPriority": "MEDIUM",
"workerGroup": "default",
"failRetryTimes": 0,
"failRetryInterval": 1,
"timeoutFlag": "CLOSE",
"timeoutNotifyStrategy": None,
"timeout": 0,
}
with patch(
"pydolphinscheduler.core.task.Task.gen_code_and_version",
return_value=(code, version),
):
task = Flink(name, main_class, main_package, program_type, deploy_mode)
assert task.get_define() == expect
|
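test_flink.py above shows the testing convention for engine tasks: patch `Task.gen_code_and_version` and the gateway resource lookup, build the task, and compare `get_define()` against the expected dict. A hedged sketch of the matching Spark test, written against the hypothetical `Spark` class sketched earlier (the module path and field names are assumptions):

```python
# Hypothetical tests/tasks/test_spark.py, following test_flink.py above.
from unittest.mock import patch


def test_spark_get_define():
    """Check only the assumed stable fields of the task definition."""
    with patch(
        "pydolphinscheduler.core.task.Task.gen_code_and_version",
        return_value=(123, 1),
    ), patch(
        # Patches the resource lookup on the sketched Spark class.
        "pydolphinscheduler.tasks.spark.Spark.get_resource_info",
        return_value={"id": 1, "name": "test"},
    ):
        from pydolphinscheduler.tasks.spark import Spark  # assumed module

        task = Spark("test_spark", "org.example.Main", "main.jar")
        define = task.get_define()
        assert define["taskType"] == "SPARK"
        assert define["taskParams"]["mainJar"] == {"id": 1}
```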
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,346 | [Feature][Python] Add workflow as code task type spark | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add Python API task type Spark. Sub-task of #6407. We should cover all parameters from the UI side and make it suitable for Python.
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7346 | https://github.com/apache/dolphinscheduler/pull/7968 | f4a8502d436ef7c6beff3b5290d7daf93a092318 | 027af091c736a801df3082ea5323d6821a7d64dd | "2021-12-12T13:10:40Z" | java | "2022-01-15T15:24:55Z" | dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_spark.py | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,346 | [Feature][Python] Add workflow as code task type spark | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add Python API task type Spark. Sub-task of #6407. We should cover all parameters from the UI side and make it suitable for Python.
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7346 | https://github.com/apache/dolphinscheduler/pull/7968 | f4a8502d436ef7c6beff3b5290d7daf93a092318 | 027af091c736a801df3082ea5323d6821a7d64dd | "2021-12-12T13:10:40Z" | java | "2022-01-15T15:24:55Z" | dolphinscheduler-python/src/main/java/org/apache/dolphinscheduler/server/PythonGatewayServer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server;
import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.ExecutorService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.service.QueueService;
import org.apache.dolphinscheduler.api.service.ResourcesService;
import org.apache.dolphinscheduler.api.service.SchedulerService;
import org.apache.dolphinscheduler.api.service.TaskDefinitionService;
import org.apache.dolphinscheduler.api.service.TenantService;
import org.apache.dolphinscheduler.api.service.UsersService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ProcessExecutionTypeEnum;
import org.apache.dolphinscheduler.common.enums.ProgramType;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.RunMode;
import org.apache.dolphinscheduler.common.enums.TaskDependType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Queue;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.TaskDefinition;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper;
import org.apache.dolphinscheduler.spi.enums.ResourceType;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import javax.annotation.PostConstruct;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.web.servlet.support.SpringBootServletInitializer;
import org.springframework.context.annotation.ComponentScan;
import py4j.GatewayServer;
import org.apache.commons.collections.CollectionUtils;
@SpringBootApplication
@ComponentScan(value = "org.apache.dolphinscheduler")
public class PythonGatewayServer extends SpringBootServletInitializer {
private static final Logger logger = LoggerFactory.getLogger(PythonGatewayServer.class);
private static final WarningType DEFAULT_WARNING_TYPE = WarningType.NONE;
private static final int DEFAULT_WARNING_GROUP_ID = 0;
private static final FailureStrategy DEFAULT_FAILURE_STRATEGY = FailureStrategy.CONTINUE;
private static final Priority DEFAULT_PRIORITY = Priority.MEDIUM;
private static final Long DEFAULT_ENVIRONMENT_CODE = -1L;
private static final TaskDependType DEFAULT_TASK_DEPEND_TYPE = TaskDependType.TASK_POST;
private static final RunMode DEFAULT_RUN_MODE = RunMode.RUN_MODE_SERIAL;
private static final int DEFAULT_DRY_RUN = 0;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
@Autowired
private ProjectService projectService;
@Autowired
private TenantService tenantService;
@Autowired
private ExecutorService executorService;
@Autowired
private ProcessDefinitionService processDefinitionService;
@Autowired
private TaskDefinitionService taskDefinitionService;
@Autowired
private UsersService usersService;
@Autowired
private QueueService queueService;
@Autowired
private ResourcesService resourceService;
@Autowired
private ProjectMapper projectMapper;
@Autowired
private TaskDefinitionMapper taskDefinitionMapper;
@Autowired
private SchedulerService schedulerService;
@Autowired
private ScheduleMapper scheduleMapper;
@Autowired
private DataSourceMapper dataSourceMapper;
// TODO replace this user with the built-in admin user once we are sure the built-in one cannot be changed
private final User dummyAdminUser = new User() {
{
setId(Integer.MAX_VALUE);
setUserName("dummyUser");
setUserType(UserType.ADMIN_USER);
}
};
private final Queue queuePythonGateway = new Queue() {
{
setId(Integer.MAX_VALUE);
setQueueName("queuePythonGateway");
}
};
public String ping() {
return "PONG";
}
// TODO Should we import packages on the Python client side? The utils package can, but the service package cannot. Why?
// Core api
public Map<String, Object> genTaskCodeList(Integer genNum) {
return taskDefinitionService.genTaskCodeList(genNum);
}
public Map<String, Long> getCodeAndVersion(String projectName, String taskName) throws CodeGenerateUtils.CodeGenerateException {
Project project = projectMapper.queryByName(projectName);
Map<String, Long> result = new HashMap<>();
// the project does not exist, which means the task does not exist either, so directly return the initial value
if (project == null) {
result.put("code", CodeGenerateUtils.getInstance().genCode());
result.put("version", 0L);
return result;
}
TaskDefinition taskDefinition = taskDefinitionMapper.queryByName(project.getCode(), taskName);
if (taskDefinition == null) {
result.put("code", CodeGenerateUtils.getInstance().genCode());
result.put("version", 0L);
} else {
result.put("code", taskDefinition.getCode());
result.put("version", (long) taskDefinition.getVersion());
}
return result;
}
/**
* create or update process definition.
* If the process definition does not exist in Project=`projectCode`, create a new one;
* if it already exists in Project=`projectCode`, update it.
*
* @param userName user who creates or updates the process definition
* @param projectName project name which process definition belongs to
* @param name process definition name
* @param description description
* @param globalParams global params
* @param schedule schedule for the process definition; no schedule is set if null,
* and the existing schedule is always refreshed if not null
* @param locations locations json object about all tasks
* @param timeout timeout for the process definition; if the running time exceeds the timeout,
* the task is marked as failed
* @param workerGroup run task in which worker group
* @param tenantCode tenantCode
* @param taskRelationJson relation json for nodes
* @param taskDefinitionJson taskDefinitionJson
* @return create result code
*/
public Long createOrUpdateProcessDefinition(String userName,
String projectName,
String name,
String description,
String globalParams,
String schedule,
String locations,
int timeout,
String workerGroup,
String tenantCode,
String taskRelationJson,
String taskDefinitionJson,
ProcessExecutionTypeEnum executionType) {
User user = usersService.queryUser(userName);
Project project = (Project) projectService.queryByName(user, projectName).get(Constants.DATA_LIST);
long projectCode = project.getCode();
ProcessDefinition processDefinition = getProcessDefinition(user, projectCode, name);
long processDefinitionCode;
// create or update process definition
if (processDefinition != null) {
processDefinitionCode = processDefinition.getCode();
// make sure the process definition is offline so that it can be edited
processDefinitionService.releaseProcessDefinition(user, projectCode, processDefinitionCode, ReleaseState.OFFLINE);
Map<String, Object> result = processDefinitionService.updateProcessDefinition(user, projectCode, name, processDefinitionCode, description, globalParams,
locations, timeout, tenantCode, taskRelationJson, taskDefinitionJson, executionType);
} else {
Map<String, Object> result = processDefinitionService.createProcessDefinition(user, projectCode, name, description, globalParams,
locations, timeout, tenantCode, taskRelationJson, taskDefinitionJson, executionType);
processDefinition = (ProcessDefinition) result.get(Constants.DATA_LIST);
processDefinitionCode = processDefinition.getCode();
}
// Fresh process definition schedule
if (schedule != null) {
createOrUpdateSchedule(user, projectCode, processDefinitionCode, schedule, workerGroup);
}
processDefinitionService.releaseProcessDefinition(user, projectCode, processDefinitionCode, ReleaseState.ONLINE);
return processDefinitionCode;
}
/**
* get process definition
* @param user login user
* @param projectCode project which process definition belongs to
* @param processDefinitionName process definition name
*/
private ProcessDefinition getProcessDefinition(User user, long projectCode, String processDefinitionName) {
Map<String, Object> verifyProcessDefinitionExists = processDefinitionService.verifyProcessDefinitionName(user, projectCode, processDefinitionName);
Status verifyStatus = (Status) verifyProcessDefinitionExists.get(Constants.STATUS);
ProcessDefinition processDefinition = null;
if (verifyStatus == Status.PROCESS_DEFINITION_NAME_EXIST) {
processDefinition = processDefinitionMapper.queryByDefineName(projectCode, processDefinitionName);
} else if (verifyStatus != Status.SUCCESS) {
String msg = "Verify process definition exists status is invalid, neither SUCCESS or PROCESS_DEFINITION_NAME_EXIST.";
logger.error(msg);
throw new RuntimeException(msg);
}
return processDefinition;
}
/**
* create or update process definition schedule.
* It always uses the latest schedule defined in workflow-as-code, and sets the schedule
* online when it is not null
*
* @param user user who creates or updates the schedule
* @param projectCode project which process definition belongs to
* @param processDefinitionCode process definition code
* @param schedule schedule expression
* @param workerGroup work group
*/
private void createOrUpdateSchedule(User user,
long projectCode,
long processDefinitionCode,
String schedule,
String workerGroup) {
Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(processDefinitionCode);
// create or update schedule
int scheduleId;
if (scheduleObj == null) {
processDefinitionService.releaseProcessDefinition(user, projectCode, processDefinitionCode, ReleaseState.ONLINE);
Map<String, Object> result = schedulerService.insertSchedule(user, projectCode, processDefinitionCode, schedule, DEFAULT_WARNING_TYPE,
DEFAULT_WARNING_GROUP_ID, DEFAULT_FAILURE_STRATEGY, DEFAULT_PRIORITY, workerGroup, DEFAULT_ENVIRONMENT_CODE);
scheduleId = (int) result.get("scheduleId");
} else {
scheduleId = scheduleObj.getId();
processDefinitionService.releaseProcessDefinition(user, projectCode, processDefinitionCode, ReleaseState.OFFLINE);
schedulerService.updateSchedule(user, projectCode, scheduleId, schedule, DEFAULT_WARNING_TYPE,
DEFAULT_WARNING_GROUP_ID, DEFAULT_FAILURE_STRATEGY, DEFAULT_PRIORITY, workerGroup, DEFAULT_ENVIRONMENT_CODE);
}
schedulerService.setScheduleState(user, projectCode, scheduleId, ReleaseState.ONLINE);
}
public void execProcessInstance(String userName,
String projectName,
String processDefinitionName,
String cronTime,
String workerGroup,
Integer timeout
) {
User user = usersService.queryUser(userName);
Project project = projectMapper.queryByName(projectName);
ProcessDefinition processDefinition = processDefinitionMapper.queryByDefineName(project.getCode(), processDefinitionName);
// make sure process definition online
processDefinitionService.releaseProcessDefinition(user, project.getCode(), processDefinition.getCode(), ReleaseState.ONLINE);
executorService.execProcessInstance(user,
project.getCode(),
processDefinition.getCode(),
cronTime,
null,
DEFAULT_FAILURE_STRATEGY,
null,
DEFAULT_TASK_DEPEND_TYPE,
DEFAULT_WARNING_TYPE,
DEFAULT_WARNING_GROUP_ID,
DEFAULT_RUN_MODE,
DEFAULT_PRIORITY,
workerGroup,
DEFAULT_ENVIRONMENT_CODE,
timeout,
null,
null,
DEFAULT_DRY_RUN
);
}
// side object
public Map<String, Object> createProject(String userName, String name, String desc) {
User user = usersService.queryUser(userName);
return projectService.createProject(user, name, desc);
}
public Map<String, Object> createQueue(String name, String queueName) {
Result<Object> verifyQueueExists = queueService.verifyQueue(name, queueName);
if (verifyQueueExists.getCode() == 0) {
return queueService.createQueue(dummyAdminUser, name, queueName);
} else {
Map<String, Object> result = new HashMap<>();
// TODO function putMsg does not work here
result.put(Constants.STATUS, Status.SUCCESS);
result.put(Constants.MSG, Status.SUCCESS.getMsg());
return result;
}
}
public Map<String, Object> createTenant(String tenantCode, String desc, String queueName) throws Exception {
if (tenantService.checkTenantExists(tenantCode)) {
Map<String, Object> result = new HashMap<>();
// TODO function putMsg does not work here
result.put(Constants.STATUS, Status.SUCCESS);
result.put(Constants.MSG, Status.SUCCESS.getMsg());
return result;
} else {
Result<Object> verifyQueueExists = queueService.verifyQueue(queueName, queueName);
if (verifyQueueExists.getCode() == 0) {
// TODO why does create not return the id?
queueService.createQueue(dummyAdminUser, queueName, queueName);
}
Map<String, Object> result = queueService.queryQueueName(queueName);
List<Queue> queueList = (List<Queue>) result.get(Constants.DATA_LIST);
Queue queue = queueList.get(0);
return tenantService.createTenant(dummyAdminUser, tenantCode, queue.getId(), desc);
}
}
public void createUser(String userName,
String userPassword,
String email,
String phone,
String tenantCode,
String queue,
int state) {
User user = usersService.queryUser(userName);
if (Objects.isNull(user)) {
Map<String, Object> tenantResult = tenantService.queryByTenantCode(tenantCode);
Tenant tenant = (Tenant) tenantResult.get(Constants.DATA_LIST);
usersService.createUser(userName, userPassword, email, tenant.getId(), phone, queue, state);
}
}
/**
* Get datasource by the given datasource name. It returns a map containing the datasource id, type and name.
* Useful when the Python API creates a SQL task, which needs datasource information.
*
* @param datasourceName datasource name
*/
public Map<String, Object> getDatasourceInfo(String datasourceName) {
Map<String, Object> result = new HashMap<>();
List<DataSource> dataSourceList = dataSourceMapper.queryDataSourceByName(datasourceName);
if (dataSourceList == null || dataSourceList.isEmpty()) {
String msg = String.format("Can not find any datasource by name %s", datasourceName);
logger.error(msg);
throw new IllegalArgumentException(msg);
} else if (dataSourceList.size() > 1) {
String msg = String.format("Get more than one datasource by name %s", datasourceName);
logger.error(msg);
throw new IllegalArgumentException(msg);
} else {
DataSource dataSource = dataSourceList.get(0);
result.put("id", dataSource.getId());
result.put("type", dataSource.getType().name());
result.put("name", dataSource.getName());
}
return result;
}
/**
* Get process definition by the given process definition name. It returns a map containing the process definition id, name and code.
* Useful when the Python API creates a subProcess task, which needs process definition information.
*
* @param userName user name
* @param projectName project name which process definition belongs to
* @param processDefinitionName process definition name
*/
public Map<String, Object> getProcessDefinitionInfo(String userName, String projectName, String processDefinitionName) {
Map<String, Object> result = new HashMap<>();
User user = usersService.queryUser(userName);
Project project = (Project) projectService.queryByName(user, projectName).get(Constants.DATA_LIST);
long projectCode = project.getCode();
ProcessDefinition processDefinition = getProcessDefinition(user, projectCode, processDefinitionName);
// get process definition info
if (processDefinition != null) {
// make sure process definition online
processDefinitionService.releaseProcessDefinition(user, projectCode, processDefinition.getCode(), ReleaseState.ONLINE);
result.put("id", processDefinition.getId());
result.put("name", processDefinition.getName());
result.put("code", processDefinition.getCode());
} else {
String msg = String.format("Can not find valid process definition by name %s", processDefinitionName);
logger.error(msg);
throw new IllegalArgumentException(msg);
}
return result;
}
/**
* Get project, process definition, task code.
* Useful when the Python API creates a dependent task, which needs process definition information.
*
* @param projectName project name which process definition belongs to
* @param processDefinitionName process definition name
* @param taskName task name
*/
public Map<String, Object> getDependentInfo(String projectName, String processDefinitionName, String taskName) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
if (project == null) {
String msg = String.format("Can not find valid project by name %s", projectName);
logger.error(msg);
throw new IllegalArgumentException(msg);
}
long projectCode = project.getCode();
result.put("projectCode", projectCode);
ProcessDefinition processDefinition = processDefinitionMapper.queryByDefineName(projectCode, processDefinitionName);
if (processDefinition == null) {
String msg = String.format("Can not find valid process definition by name %s", processDefinitionName);
logger.error(msg);
throw new IllegalArgumentException(msg);
}
result.put("processDefinitionCode", processDefinition.getCode());
if (taskName != null) {
TaskDefinition taskDefinition = taskDefinitionMapper.queryByName(projectCode, taskName);
result.put("taskDefinitionCode", taskDefinition.getCode());
}
return result;
}
/**
* Get resource by the given program type and full name. It returns a map containing the resource id and name.
* Useful when the Python API creates a Flink task, which needs resource information.
*
* @param programType program type one of SCALA, JAVA and PYTHON
* @param fullName full name of the resource
*/
public Map<String, Object> getResourcesFileInfo(String programType, String fullName) {
Map<String, Object> result = new HashMap<>();
Map<String, Object> resources = resourceService.queryResourceByProgramType(dummyAdminUser, ResourceType.FILE, ProgramType.valueOf(programType));
List<ResourceComponent> resourcesComponent = (List<ResourceComponent>) resources.get(Constants.DATA_LIST);
List<ResourceComponent> namedResources = resourcesComponent.stream().filter(s -> fullName.equals(s.getFullName())).collect(Collectors.toList());
if (CollectionUtils.isEmpty(namedResources)) {
String msg = String.format("Can not find valid resource by program type %s and name %s", programType, fullName);
logger.error(msg);
throw new IllegalArgumentException(msg);
}
result.put("id", namedResources.get(0).getId());
result.put("name", namedResources.get(0).getName());
return result;
}
@PostConstruct
public void run() {
GatewayServer server = new GatewayServer(this);
GatewayServer.turnLoggingOn();
// Start server to accept python client socket
server.start();
}
public static void main(String[] args) {
SpringApplication.run(PythonGatewayServer.class, args);
}
}
|
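PythonGatewayServer registers itself as the py4j entry point in `run()`, which is what the `launch_gateway().entry_point` calls on the Python side talk to. A minimal client sketch using py4j's standard `JavaGateway` (default local port; the datasource name is a placeholder for one that exists on the server):

```python
# Minimal py4j client sketch against the PythonGatewayServer above.
from py4j.java_gateway import JavaGateway

gateway = JavaGateway()  # connects to py4j's default local port
entry_point = gateway.entry_point

# Health check: the server's ping() returns "PONG".
print(entry_point.ping())

# Look up a datasource by name; "mysql_ds" is a placeholder, and the
# Java side raises IllegalArgumentException if no such datasource exists.
datasource = entry_point.getDatasourceInfo("mysql_ds")
print(datasource["id"], datasource["type"], datasource["name"])
```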
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,000 | [Bug][Api] process update user name is wrong | After an admin modifies the workflow of ordinary user Jane, the recorded modifier is still Jane; it should be admin
![image](https://user-images.githubusercontent.com/95271106/149276076-f232fbdf-b66a-48a3-9f76-db4315d518bb.png)
_Originally posted by @janeHe13 in https://github.com/apache/dolphinscheduler/issues/7777#issuecomment-1011830937_ | https://github.com/apache/dolphinscheduler/issues/8000 | https://github.com/apache/dolphinscheduler/pull/8063 | 027af091c736a801df3082ea5323d6821a7d64dd | 5bcf7bcce6702dd4828215659c04cb487a70eff0 | "2022-01-13T09:59:37Z" | java | "2022-01-15T15:52:06Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_CODE;
import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP;
import org.apache.dolphinscheduler.api.dto.DagDataSchedule;
import org.apache.dolphinscheduler.api.dto.ScheduleParam;
import org.apache.dolphinscheduler.api.dto.treeview.Instance;
import org.apache.dolphinscheduler.api.dto.treeview.TreeViewDto;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.service.ProcessInstanceService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.service.SchedulerService;
import org.apache.dolphinscheduler.api.utils.CheckUtils;
import org.apache.dolphinscheduler.api.utils.FileUtils;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ConditionType;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ProcessExecutionTypeEnum;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.enums.TimeoutFlag;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.task.sql.SqlParameters;
import org.apache.dolphinscheduler.common.task.sql.SqlType;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils;
import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils.CodeGenerateException;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.entity.DagData;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionLog;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelationLog;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.TaskDefinition;
import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionLogMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationLogMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionLogMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.Lists;
/**
* process definition service impl
*/
@Service
public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements ProcessDefinitionService {
private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionServiceImpl.class);
private static final String RELEASESTATE = "releaseState";
@Autowired
private ProjectMapper projectMapper;
@Autowired
private ProjectService projectService;
@Autowired
private UserMapper userMapper;
@Autowired
private ProcessDefinitionLogMapper processDefinitionLogMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
@Autowired
private ProcessInstanceService processInstanceService;
@Autowired
private TaskInstanceMapper taskInstanceMapper;
@Autowired
private ScheduleMapper scheduleMapper;
@Autowired
private ProcessService processService;
@Autowired
private ProcessTaskRelationMapper processTaskRelationMapper;
@Autowired
private ProcessTaskRelationLogMapper processTaskRelationLogMapper;
@Autowired
private TaskDefinitionLogMapper taskDefinitionLogMapper;
@Autowired
private TaskDefinitionMapper taskDefinitionMapper;
@Autowired
private SchedulerService schedulerService;
@Autowired
private TenantMapper tenantMapper;
@Autowired
private DataSourceMapper dataSourceMapper;
/**
* create process definition
*
* @param loginUser login user
* @param projectCode project code
* @param name process definition name
* @param description description
* @param globalParams global params
* @param locations locations for nodes
* @param timeout timeout
* @param tenantCode tenantCode
* @param taskRelationJson relation json for nodes
* @param taskDefinitionJson taskDefinitionJson
* @return create result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> createProcessDefinition(User loginUser,
long projectCode,
String name,
String description,
String globalParams,
String locations,
int timeout,
String tenantCode,
String taskRelationJson,
String taskDefinitionJson,
ProcessExecutionTypeEnum executionType) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
// check whether the new process definition name already exists
ProcessDefinition definition = processDefinitionMapper.verifyByDefineName(project.getCode(), name);
if (definition != null) {
putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name);
return result;
}
List<TaskDefinitionLog> taskDefinitionLogs = JSONUtils.toList(taskDefinitionJson, TaskDefinitionLog.class);
Map<String, Object> checkTaskDefinitions = checkTaskDefinitionList(taskDefinitionLogs, taskDefinitionJson);
if (checkTaskDefinitions.get(Constants.STATUS) != Status.SUCCESS) {
return checkTaskDefinitions;
}
List<ProcessTaskRelationLog> taskRelationList = JSONUtils.toList(taskRelationJson, ProcessTaskRelationLog.class);
Map<String, Object> checkRelationJson = checkTaskRelationList(taskRelationList, taskRelationJson, taskDefinitionLogs);
if (checkRelationJson.get(Constants.STATUS) != Status.SUCCESS) {
return checkRelationJson;
}
int tenantId = -1;
if (!Constants.DEFAULT.equals(tenantCode)) {
Tenant tenant = tenantMapper.queryByTenantCode(tenantCode);
if (tenant == null) {
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
tenantId = tenant.getId();
}
long processDefinitionCode;
try {
processDefinitionCode = CodeGenerateUtils.getInstance().genCode();
} catch (CodeGenerateException e) {
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS);
return result;
}
ProcessDefinition processDefinition = new ProcessDefinition(projectCode, name, processDefinitionCode, description,
globalParams, locations, timeout, loginUser.getId(), tenantId);
processDefinition.setExecutionType(executionType);
return createDagDefine(loginUser, taskRelationList, processDefinition, taskDefinitionLogs);
}
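/**
 * persist a DAG definition in three steps: save the task definitions, save a new process
 * definition version, then save the task relations that wire them together. Any
 * persistence failure throws a ServiceException so the surrounding transaction rolls back.
 */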
private Map<String, Object> createDagDefine(User loginUser,
List<ProcessTaskRelationLog> taskRelationList,
ProcessDefinition processDefinition,
List<TaskDefinitionLog> taskDefinitionLogs) {
Map<String, Object> result = new HashMap<>();
int saveTaskResult = processService.saveTaskDefine(loginUser, processDefinition.getProjectCode(), taskDefinitionLogs, Boolean.TRUE);
if (saveTaskResult == Constants.EXIT_CODE_SUCCESS) {
logger.info("The task has not changed, so skip");
}
if (saveTaskResult == Constants.DEFINITION_FAILURE) {
putMsg(result, Status.CREATE_TASK_DEFINITION_ERROR);
throw new ServiceException(Status.CREATE_TASK_DEFINITION_ERROR);
}
int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE);
if (insertVersion == 0) {
putMsg(result, Status.CREATE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.CREATE_PROCESS_DEFINITION_ERROR);
}
int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(), processDefinition.getCode(),
insertVersion, taskRelationList, taskDefinitionLogs, Boolean.TRUE);
if (insertResult == Constants.EXIT_CODE_SUCCESS) {
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefinition);
} else {
putMsg(result, Status.CREATE_PROCESS_TASK_RELATION_ERROR);
throw new ServiceException(Status.CREATE_PROCESS_TASK_RELATION_ERROR);
}
return result;
}
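/**
 * validate the parsed task definition list: it must be non-empty and every task
 * definition's parameters must pass CheckUtils validation.
 */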
private Map<String, Object> checkTaskDefinitionList(List<TaskDefinitionLog> taskDefinitionLogs, String taskDefinitionJson) {
Map<String, Object> result = new HashMap<>();
try {
if (taskDefinitionLogs.isEmpty()) {
logger.error("taskDefinitionJson invalid: {}", taskDefinitionJson);
putMsg(result, Status.DATA_IS_NOT_VALID, taskDefinitionJson);
return result;
}
for (TaskDefinitionLog taskDefinitionLog : taskDefinitionLogs) {
if (!CheckUtils.checkTaskDefinitionParameters(taskDefinitionLog)) {
logger.error("task definition {} parameter invalid", taskDefinitionLog.getName());
putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinitionLog.getName());
return result;
}
}
putMsg(result, Status.SUCCESS);
} catch (Exception e) {
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
result.put(Constants.MSG, e.getMessage());
}
return result;
}
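/**
 * validate the parsed task relation list: it must be non-empty, every post task code must
 * resolve to a known task node, the resulting graph must be acyclic, and no relation may
 * carry a zero post_task_code.
 */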
private Map<String, Object> checkTaskRelationList(List<ProcessTaskRelationLog> taskRelationList, String taskRelationJson, List<TaskDefinitionLog> taskDefinitionLogs) {
Map<String, Object> result = new HashMap<>();
try {
if (taskRelationList == null || taskRelationList.isEmpty()) {
logger.error("task relation list is null");
putMsg(result, Status.DATA_IS_NOT_VALID, taskRelationJson);
return result;
}
List<ProcessTaskRelation> processTaskRelations = taskRelationList.stream()
.map(processTaskRelationLog -> JSONUtils.parseObject(JSONUtils.toJsonString(processTaskRelationLog), ProcessTaskRelation.class))
.collect(Collectors.toList());
List<TaskNode> taskNodeList = processService.transformTask(processTaskRelations, taskDefinitionLogs);
if (taskNodeList.size() != taskRelationList.size()) {
Set<Long> postTaskCodes = taskRelationList.stream().map(ProcessTaskRelationLog::getPostTaskCode).collect(Collectors.toSet());
Set<Long> taskNodeCodes = taskNodeList.stream().map(TaskNode::getCode).collect(Collectors.toSet());
Collection<Long> codes = CollectionUtils.subtract(postTaskCodes, taskNodeCodes);
if (CollectionUtils.isNotEmpty(codes)) {
logger.error("the task code is not exist");
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, org.apache.commons.lang.StringUtils.join(codes, Constants.COMMA));
return result;
}
}
if (graphHasCycle(taskNodeList)) {
logger.error("process DAG has cycle");
putMsg(result, Status.PROCESS_NODE_HAS_CYCLE);
return result;
}
// check whether the task relation json is normal
for (ProcessTaskRelationLog processTaskRelationLog : taskRelationList) {
if (processTaskRelationLog.getPostTaskCode() == 0) {
logger.error("the post_task_code or post_task_version can't be zero");
putMsg(result, Status.CHECK_PROCESS_TASK_RELATION_ERROR);
return result;
}
}
putMsg(result, Status.SUCCESS);
} catch (Exception e) {
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
result.put(Constants.MSG, e.getMessage());
}
return result;
}
/**
* query process definition list
*
* @param loginUser login user
* @param projectCode project code
* @return definition list
*/
@Override
public Map<String, Object> queryProcessDefinitionList(User loginUser, long projectCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
List<ProcessDefinition> resourceList = processDefinitionMapper.queryAllDefinitionList(projectCode);
List<DagData> dagDataList = resourceList.stream().map(processService::genDagData).collect(Collectors.toList());
result.put(Constants.DATA_LIST, dagDataList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query process definition simple list
*
* @param loginUser login user
* @param projectCode project code
* @return definition simple list
*/
@Override
public Map<String, Object> queryProcessDefinitionSimpleList(User loginUser, long projectCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
List<ProcessDefinition> processDefinitions = processDefinitionMapper.queryAllDefinitionList(projectCode);
ArrayNode arrayNode = JSONUtils.createArrayNode();
for (ProcessDefinition processDefinition : processDefinitions) {
ObjectNode processDefinitionNode = JSONUtils.createObjectNode();
processDefinitionNode.put("id", processDefinition.getId());
processDefinitionNode.put("code", processDefinition.getCode());
processDefinitionNode.put("name", processDefinition.getName());
processDefinitionNode.put("projectCode", processDefinition.getProjectCode());
arrayNode.add(processDefinitionNode);
}
result.put(Constants.DATA_LIST, arrayNode);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query process definition list paging
*
* @param loginUser login user
* @param projectCode project code
* @param searchVal search value
* @param userId user id
* @param pageNo page number
* @param pageSize page size
* @return process definition page
*/
@Override
public Result queryProcessDefinitionListPaging(User loginUser, long projectCode, String searchVal, Integer userId, Integer pageNo, Integer pageSize) {
Result result = new Result();
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectCode);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
putMsg(result, resultStatus);
return result;
}
Page<ProcessDefinition> page = new Page<>(pageNo, pageSize);
IPage<ProcessDefinition> processDefinitionIPage = processDefinitionMapper.queryDefineListPaging(
page, searchVal, userId, project.getCode(), isAdmin(loginUser));
List<ProcessDefinition> records = processDefinitionIPage.getRecords();
for (ProcessDefinition pd : records) {
ProcessDefinitionLog processDefinitionLog = processDefinitionLogMapper.queryByDefinitionCodeAndVersion(pd.getCode(), pd.getVersion());
User user = userMapper.selectById(processDefinitionLog.getOperator());
pd.setModifyBy(user.getUserName());
}
processDefinitionIPage.setRecords(records);
PageInfo<ProcessDefinition> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotal((int) processDefinitionIPage.getTotal());
pageInfo.setTotalList(processDefinitionIPage.getRecords());
result.setData(pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query detail of process definition
*
* @param loginUser login user
* @param projectCode project code
* @param code process definition code
* @return process definition detail
*/
@Override
public Map<String, Object> queryProcessDefinitionByCode(User loginUser, long projectCode, long code) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
} else {
Tenant tenant = tenantMapper.queryById(processDefinition.getTenantId());
if (tenant != null) {
processDefinition.setTenantCode(tenant.getTenantCode());
}
DagData dagData = processService.genDagData(processDefinition);
result.put(Constants.DATA_LIST, dagData);
putMsg(result, Status.SUCCESS);
}
return result;
}
@Override
public Map<String, Object> queryProcessDefinitionByName(User loginUser, long projectCode, String name) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByDefineName(projectCode, name);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, name);
} else {
DagData dagData = processService.genDagData(processDefinition);
result.put(Constants.DATA_LIST, dagData);
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* update process definition
*
* @param loginUser login user
* @param projectCode project code
* @param name process definition name
* @param code process definition code
* @param description description
* @param globalParams global params
* @param locations locations for nodes
* @param timeout timeout
* @param tenantCode tenantCode
* @param taskRelationJson relation json for nodes
* @param taskDefinitionJson taskDefinitionJson
* @return update result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> updateProcessDefinition(User loginUser,
long projectCode,
String name,
long code,
String description,
String globalParams,
String locations,
int timeout,
String tenantCode,
String taskRelationJson,
String taskDefinitionJson,
ProcessExecutionTypeEnum executionType) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
List<TaskDefinitionLog> taskDefinitionLogs = JSONUtils.toList(taskDefinitionJson, TaskDefinitionLog.class);
Map<String, Object> checkTaskDefinitions = checkTaskDefinitionList(taskDefinitionLogs, taskDefinitionJson);
if (checkTaskDefinitions.get(Constants.STATUS) != Status.SUCCESS) {
return checkTaskDefinitions;
}
List<ProcessTaskRelationLog> taskRelationList = JSONUtils.toList(taskRelationJson, ProcessTaskRelationLog.class);
Map<String, Object> checkRelationJson = checkTaskRelationList(taskRelationList, taskRelationJson, taskDefinitionLogs);
if (checkRelationJson.get(Constants.STATUS) != Status.SUCCESS) {
return checkRelationJson;
}
int tenantId = -1;
if (!Constants.DEFAULT.equals(tenantCode)) {
Tenant tenant = tenantMapper.queryByTenantCode(tenantCode);
if (tenant == null) {
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
tenantId = tenant.getId();
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
// check process definition exists
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
return result;
}
if (processDefinition.getReleaseState() == ReleaseState.ONLINE) {
// an online process definition can not be edited
putMsg(result, Status.PROCESS_DEFINE_NOT_ALLOWED_EDIT, processDefinition.getName());
return result;
}
if (!name.equals(processDefinition.getName())) {
// check whether the new process definition name already exists
ProcessDefinition definition = processDefinitionMapper.verifyByDefineName(project.getCode(), name);
if (definition != null) {
putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name);
return result;
}
}
ProcessDefinition processDefinitionDeepCopy = JSONUtils.parseObject(JSONUtils.toJsonString(processDefinition), ProcessDefinition.class);
processDefinition.set(projectCode, name, description, globalParams, locations, timeout, tenantId);
processDefinition.setExecutionType(executionType);
return updateDagDefine(loginUser, taskRelationList, processDefinition, processDefinitionDeepCopy, taskDefinitionLogs);
}
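/**
 * persist an updated DAG definition: if the process definition is unchanged the current
 * version is reused, otherwise a new version is saved together with the task definitions
 * and task relations, throwing a ServiceException on any persistence failure.
 */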
private Map<String, Object> updateDagDefine(User loginUser,
List<ProcessTaskRelationLog> taskRelationList,
ProcessDefinition processDefinition,
ProcessDefinition processDefinitionDeepCopy,
List<TaskDefinitionLog> taskDefinitionLogs) {
Map<String, Object> result = new HashMap<>();
int saveTaskResult = processService.saveTaskDefine(loginUser, processDefinition.getProjectCode(), taskDefinitionLogs, Boolean.TRUE);
if (saveTaskResult == Constants.EXIT_CODE_SUCCESS) {
logger.info("The task has not changed, so skip");
}
if (saveTaskResult == Constants.DEFINITION_FAILURE) {
putMsg(result, Status.UPDATE_TASK_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR);
}
int insertVersion;
if (processDefinition.equals(processDefinitionDeepCopy)) {
insertVersion = processDefinitionDeepCopy.getVersion();
} else {
processDefinition.setUpdateTime(new Date());
insertVersion = processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE);
}
if (insertVersion == 0) {
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR);
}
int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(),
processDefinition.getCode(), insertVersion, taskRelationList, taskDefinitionLogs, Boolean.TRUE);
if (insertResult == Constants.EXIT_CODE_SUCCESS) {
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefinition);
} else {
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR);
}
return result;
}
/**
* verify process definition name unique
*
* @param loginUser login user
* @param projectCode project code
* @param name name
* @return true if process definition name not exists, otherwise false
*/
@Override
public Map<String, Object> verifyProcessDefinitionName(User loginUser, long projectCode, String name) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.verifyByDefineName(project.getCode(), name.trim());
if (processDefinition == null) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name.trim());
}
return result;
}
/**
* delete process definition by code
*
* @param loginUser login user
* @param projectCode project code
* @param code process definition code
* @return delete result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> deleteProcessDefinitionByCode(User loginUser, long projectCode, long code) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
return result;
}
// Determine if the login user is the owner of the process definition
if (loginUser.getId() != processDefinition.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
// an online process definition can not be deleted
if (processDefinition.getReleaseState() == ReleaseState.ONLINE) {
putMsg(result, Status.PROCESS_DEFINE_STATE_ONLINE, code);
return result;
}
// check whether any process instances are still running
List<ProcessInstance> processInstances = processInstanceService.queryByProcessDefineCodeAndStatus(processDefinition.getCode(), Constants.NOT_TERMINATED_STATES);
if (CollectionUtils.isNotEmpty(processInstances)) {
putMsg(result, Status.DELETE_PROCESS_DEFINITION_BY_CODE_FAIL, processInstances.size());
return result;
}
// get the schedule bound to the process definition
Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(code);
if (scheduleObj != null) {
if (scheduleObj.getReleaseState() == ReleaseState.OFFLINE) {
int delete = scheduleMapper.deleteById(scheduleObj.getId());
if (delete == 0) {
putMsg(result, Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR);
throw new ServiceException(Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR);
}
}
if (scheduleObj.getReleaseState() == ReleaseState.ONLINE) {
putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, scheduleObj.getId());
return result;
}
}
int delete = processDefinitionMapper.deleteById(processDefinition.getId());
if (delete == 0) {
putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR);
throw new ServiceException(Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR);
}
int deleteRelation = processTaskRelationMapper.deleteByCode(project.getCode(), processDefinition.getCode());
if (deleteRelation == 0) {
logger.warn("The process definition has not relation, it will be delete successfully");
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* release process definition: online / offline
*
* @param loginUser login user
* @param projectCode project code
* @param code process definition code
* @param releaseState release state
* @return release result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> releaseProcessDefinition(User loginUser, long projectCode, long code, ReleaseState releaseState) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
// check state
if (null == releaseState) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE);
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
return result;
}
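// going online requires a non-empty DAG; going offline also takes any bound schedule offline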
switch (releaseState) {
case ONLINE:
List<ProcessTaskRelation> relationList = processService.findRelationByCode(code, processDefinition.getVersion());
if (CollectionUtils.isEmpty(relationList)) {
putMsg(result, Status.PROCESS_DAG_IS_EMPTY);
return result;
}
processDefinition.setReleaseState(releaseState);
processDefinitionMapper.updateById(processDefinition);
break;
case OFFLINE:
processDefinition.setReleaseState(releaseState);
int updateProcess = processDefinitionMapper.updateById(processDefinition);
Schedule schedule = scheduleMapper.queryByProcessDefinitionCode(code);
if (updateProcess > 0 && schedule != null) {
logger.info("set schedule offline, project code: {}, schedule id: {}, process definition code: {}", projectCode, schedule.getId(), code);
// set status
schedule.setReleaseState(releaseState);
int updateSchedule = scheduleMapper.updateById(schedule);
if (updateSchedule == 0) {
putMsg(result, Status.OFFLINE_SCHEDULE_ERROR);
throw new ServiceException(Status.OFFLINE_SCHEDULE_ERROR);
}
schedulerService.deleteSchedule(project.getId(), schedule.getId());
}
break;
default:
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE);
return result;
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* batch export process definition by codes
*/
@Override
public void batchExportProcessDefinitionByCodes(User loginUser, long projectCode, String codes, HttpServletResponse response) {
if (org.apache.commons.lang.StringUtils.isEmpty(codes)) {
return;
}
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return;
}
Set<Long> defineCodeSet = Lists.newArrayList(codes.split(Constants.COMMA)).stream().map(Long::parseLong).collect(Collectors.toSet());
List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryByCodes(defineCodeSet);
if (CollectionUtils.isEmpty(processDefinitionList)) {
return;
}
// keep only the process definitions that belong to this project
List<ProcessDefinition> processDefinitionListInProject = processDefinitionList.stream().filter(o -> projectCode == o.getProjectCode()).collect(Collectors.toList());
List<DagDataSchedule> dagDataSchedules = processDefinitionListInProject.stream().map(this::exportProcessDagData).collect(Collectors.toList());
if (CollectionUtils.isNotEmpty(dagDataSchedules)) {
downloadProcessDefinitionFile(response, dagDataSchedules);
}
}
/**
* download the process definition file
*/
private void downloadProcessDefinitionFile(HttpServletResponse response, List<DagDataSchedule> dagDataSchedules) {
response.setContentType(MediaType.APPLICATION_JSON_UTF8_VALUE);
BufferedOutputStream buff = null;
ServletOutputStream out = null;
try {
out = response.getOutputStream();
buff = new BufferedOutputStream(out);
buff.write(JSONUtils.toJsonString(dagDataSchedules).getBytes(StandardCharsets.UTF_8));
buff.flush();
buff.close();
} catch (IOException e) {
logger.warn("export process fail", e);
} finally {
if (null != buff) {
try {
buff.close();
} catch (Exception e) {
logger.warn("export process buffer not close", e);
}
}
if (null != out) {
try {
out.close();
} catch (Exception e) {
logger.warn("export process output stream not close", e);
}
}
}
}
/**
* get export process dag data
*
* @param processDefinition process definition
* @return DagDataSchedule
*/
public DagDataSchedule exportProcessDagData(ProcessDefinition processDefinition) {
Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(processDefinition.getCode());
DagDataSchedule dagDataSchedule = new DagDataSchedule(processService.genDagData(processDefinition));
if (scheduleObj != null) {
scheduleObj.setReleaseState(ReleaseState.OFFLINE);
dagDataSchedule.setSchedule(scheduleObj);
}
return dagDataSchedule;
}
/**
* import process definition
*
* @param loginUser login user
* @param projectCode project code
* @param file process metadata json file
* @return import process
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> importProcessDefinition(User loginUser, long projectCode, MultipartFile file) {
Map<String, Object> result = new HashMap<>();
String dagDataScheduleJson = FileUtils.file2String(file);
List<DagDataSchedule> dagDataScheduleList = JSONUtils.toList(dagDataScheduleJson, DagDataSchedule.class);
//check file content
if (CollectionUtils.isEmpty(dagDataScheduleList)) {
putMsg(result, Status.DATA_IS_NULL, "fileContent");
return result;
}
for (DagDataSchedule dagDataSchedule : dagDataScheduleList) {
if (!checkAndImport(loginUser, projectCode, result, dagDataSchedule)) {
return result;
}
}
return result;
}
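/**
 * import process definitions from a zip of SQL files: every zip entry becomes one SQL task,
 * configured by per-line "-- key: value" comments ("name", "upstream", "datasource").
 * When no name comment is present the task name is derived from the entry name
 * (e.g. import/sql1.sql -> sql1), and an upstream of "root" (when no task is named root)
 * marks the task as a DAG entry node.
 *
 * A minimal sketch of one entry inside the uploaded zip; the task, datasource and table
 * names below are illustrative only:
 * <pre>
 * -- name: load_orders
 * -- datasource: warehouse_mysql
 * -- upstream: root
 * INSERT INTO dw_orders SELECT * FROM ods_orders;
 * </pre>
 */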
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> importSqlProcessDefinition(User loginUser, long projectCode, MultipartFile file) {
Map<String, Object> result = new HashMap<>();
String processDefinitionName = file.getOriginalFilename() == null ? file.getName() : file.getOriginalFilename();
int index = processDefinitionName.lastIndexOf(".");
if (index > 0) {
processDefinitionName = processDefinitionName.substring(0, index);
}
processDefinitionName = processDefinitionName + "_import_" + DateUtils.getCurrentTimeStamp();
ProcessDefinition processDefinition;
List<TaskDefinitionLog> taskDefinitionList = new ArrayList<>();
List<ProcessTaskRelationLog> processTaskRelationList = new ArrayList<>();
// thresholds guarding against zip bomb attacks
final int THRESHOLD_ENTRIES = 10000;
final int THRESHOLD_SIZE = 1000000000; // 1 GB
final double THRESHOLD_RATIO = 10;
int totalEntryArchive = 0;
int totalSizeEntry = 0;
// In most cases, there will be only one data source
Map<String, DataSource> dataSourceCache = new HashMap<>(1);
Map<String, Long> taskNameToCode = new HashMap<>(16);
Map<String, List<String>> taskNameToUpstream = new HashMap<>(16);
try (ZipInputStream zIn = new ZipInputStream(file.getInputStream());
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(zIn))) {
// build process definition
processDefinition = new ProcessDefinition(projectCode,
processDefinitionName,
CodeGenerateUtils.getInstance().genCode(),
"",
"[]", null,
0, loginUser.getId(), loginUser.getTenantId());
ZipEntry entry;
while ((entry = zIn.getNextEntry()) != null) {
totalEntryArchive++;
int totalSizeArchive = 0;
if (!entry.isDirectory()) {
StringBuilder sql = new StringBuilder();
String taskName = null;
String datasourceName = null;
List<String> upstreams = Collections.emptyList();
String line;
while ((line = bufferedReader.readLine()) != null) {
int nBytes = line.getBytes(StandardCharsets.UTF_8).length;
totalSizeEntry += nBytes;
totalSizeArchive += nBytes;
// entry.getCompressedSize() returns -1 when the size is unknown,
// so only check the ratio when the compressed size is positive
long compressedSize = entry.getCompressedSize();
if (compressedSize > 0 && totalSizeEntry / compressedSize > THRESHOLD_RATIO) {
throw new IllegalStateException("ratio between compressed and uncompressed data is highly suspicious, looks like a Zip Bomb Attack");
}
int commentIndex = line.indexOf("-- ");
if (commentIndex >= 0) {
int colonIndex = line.indexOf(":", commentIndex);
if (colonIndex > 0) {
String key = line.substring(commentIndex + 3, colonIndex).trim().toLowerCase();
String value = line.substring(colonIndex + 1).trim();
switch (key) {
case "name":
taskName = value;
line = line.substring(0, commentIndex);
break;
case "upstream":
upstreams = Arrays.stream(value.split(",")).map(String::trim)
.filter(s -> !"".equals(s)).collect(Collectors.toList());
line = line.substring(0, commentIndex);
break;
case "datasource":
datasourceName = value;
line = line.substring(0, commentIndex);
break;
default:
break;
}
}
}
if (!"".equals(line)) {
sql.append(line).append("\n");
}
}
// import/sql1.sql -> sql1
if (taskName == null) {
taskName = entry.getName();
index = taskName.indexOf("/");
if (index > 0) {
taskName = taskName.substring(index + 1);
}
index = taskName.lastIndexOf(".");
if (index > 0) {
taskName = taskName.substring(0, index);
}
}
DataSource dataSource = dataSourceCache.get(datasourceName);
if (dataSource == null) {
dataSource = queryDatasourceByNameAndUser(datasourceName, loginUser);
}
if (dataSource == null) {
putMsg(result, Status.DATASOURCE_NAME_ILLEGAL);
return result;
}
dataSourceCache.put(datasourceName, dataSource);
TaskDefinitionLog taskDefinition = buildNormalSqlTaskDefinition(taskName, dataSource, sql.substring(0, sql.length() - 1));
taskDefinitionList.add(taskDefinition);
taskNameToCode.put(taskDefinition.getName(), taskDefinition.getCode());
taskNameToUpstream.put(taskDefinition.getName(), upstreams);
}
if (totalSizeArchive > THRESHOLD_SIZE) {
throw new IllegalStateException("the uncompressed data size is too much for the application resource capacity");
}
if (totalEntryArchive > THRESHOLD_ENTRIES) {
throw new IllegalStateException("too much entries in this archive, can lead to inodes exhaustion of the system");
}
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR);
return result;
}
// build the task relations from the recorded upstream mapping
for (Map.Entry<String, Long> entry : taskNameToCode.entrySet()) {
List<String> upstreams = taskNameToUpstream.get(entry.getKey());
if (CollectionUtils.isEmpty(upstreams)
|| (upstreams.size() == 1 && upstreams.contains("root") && !taskNameToCode.containsKey("root"))) {
ProcessTaskRelationLog processTaskRelation = buildNormalTaskRelation(0, entry.getValue());
processTaskRelationList.add(processTaskRelation);
continue;
}
for (String upstream : upstreams) {
ProcessTaskRelationLog processTaskRelation = buildNormalTaskRelation(taskNameToCode.get(upstream), entry.getValue());
processTaskRelationList.add(processTaskRelation);
}
}
return createDagDefine(loginUser, processTaskRelationList, processDefinition, taskDefinitionList);
}
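/**
 * build a plain (unconditional) relation between two tasks; a preTaskCode of 0
 * marks the post task as a DAG entry node.
 */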
private ProcessTaskRelationLog buildNormalTaskRelation(long preTaskCode, long postTaskCode) {
ProcessTaskRelationLog processTaskRelation = new ProcessTaskRelationLog();
processTaskRelation.setPreTaskCode(preTaskCode);
processTaskRelation.setPreTaskVersion(0);
processTaskRelation.setPostTaskCode(postTaskCode);
processTaskRelation.setPostTaskVersion(0);
processTaskRelation.setConditionType(ConditionType.NONE);
processTaskRelation.setName("");
return processTaskRelation;
}
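// admins may resolve a datasource by name across all users; other users only see their own datasources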
private DataSource queryDatasourceByNameAndUser(String datasourceName, User loginUser) {
if (isAdmin(loginUser)) {
List<DataSource> dataSources = dataSourceMapper.queryDataSourceByName(datasourceName);
if (CollectionUtils.isNotEmpty(dataSources)) {
return dataSources.get(0);
}
} else {
return dataSourceMapper.queryDataSourceByNameAndUserId(loginUser.getId(), datasourceName);
}
return null;
}
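/**
 * build a SQL task definition with conservative defaults: non-query SQL type, default
 * worker group, medium priority, no retries, no delay and no timeout.
 */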
private TaskDefinitionLog buildNormalSqlTaskDefinition(String taskName, DataSource dataSource, String sql) throws CodeGenerateException {
TaskDefinitionLog taskDefinition = new TaskDefinitionLog();
taskDefinition.setName(taskName);
taskDefinition.setFlag(Flag.YES);
SqlParameters sqlParameters = new SqlParameters();
sqlParameters.setType(dataSource.getType().name());
sqlParameters.setDatasource(dataSource.getId());
// the caller already trimmed the trailing newline, so use the SQL text as-is
sqlParameters.setSql(sql);
// it may be a query type, but it can only be determined by parsing SQL
sqlParameters.setSqlType(SqlType.NON_QUERY.ordinal());
sqlParameters.setLocalParams(Collections.emptyList());
taskDefinition.setTaskParams(JSONUtils.toJsonString(sqlParameters));
taskDefinition.setCode(CodeGenerateUtils.getInstance().genCode());
taskDefinition.setTaskType(TaskType.SQL.getDesc());
taskDefinition.setFailRetryTimes(0);
taskDefinition.setFailRetryInterval(0);
taskDefinition.setTimeoutFlag(TimeoutFlag.CLOSE);
taskDefinition.setWorkerGroup(DEFAULT_WORKER_GROUP);
taskDefinition.setTaskPriority(Priority.MEDIUM);
taskDefinition.setEnvironmentCode(-1);
taskDefinition.setTimeout(0);
taskDefinition.setDelayTime(0);
taskDefinition.setTimeoutNotifyStrategy(TaskTimeoutStrategy.WARN);
taskDefinition.setVersion(0);
taskDefinition.setResourceIds("");
return taskDefinition;
}
/**
 * check and import a single exported process definition: verify the required parts and
 * the name's uniqueness, regenerate process and task codes, remap relations and
 * locations, then recreate the DAG and any attached schedule.
 */
private boolean checkAndImport(User loginUser, long projectCode, Map<String, Object> result, DagDataSchedule dagDataSchedule) {
if (!checkImportanceParams(dagDataSchedule, result)) {
return false;
}
ProcessDefinition processDefinition = dagDataSchedule.getProcessDefinition();
//unique check
Map<String, Object> checkResult = verifyProcessDefinitionName(loginUser, projectCode, processDefinition.getName());
if (Status.SUCCESS.equals(checkResult.get(Constants.STATUS))) {
putMsg(result, Status.SUCCESS);
} else {
result.putAll(checkResult);
return false;
}
String processDefinitionName = recursionProcessDefinitionName(projectCode, processDefinition.getName(), 1);
processDefinition.setName(processDefinitionName + "_import_" + DateUtils.getCurrentTimeStamp());
processDefinition.setId(0);
processDefinition.setProjectCode(projectCode);
processDefinition.setUserId(loginUser.getId());
try {
processDefinition.setCode(CodeGenerateUtils.getInstance().genCode());
} catch (CodeGenerateException e) {
putMsg(result, Status.CREATE_PROCESS_DEFINITION_ERROR);
return false;
}
List<TaskDefinition> taskDefinitionList = dagDataSchedule.getTaskDefinitionList();
Map<Long, Long> taskCodeMap = new HashMap<>();
Date now = new Date();
List<TaskDefinitionLog> taskDefinitionLogList = new ArrayList<>();
for (TaskDefinition taskDefinition : taskDefinitionList) {
TaskDefinitionLog taskDefinitionLog = new TaskDefinitionLog(taskDefinition);
taskDefinitionLog.setName(taskDefinitionLog.getName() + "_import_" + DateUtils.getCurrentTimeStamp());
taskDefinitionLog.setProjectCode(projectCode);
taskDefinitionLog.setUserId(loginUser.getId());
taskDefinitionLog.setVersion(Constants.VERSION_FIRST);
taskDefinitionLog.setCreateTime(now);
taskDefinitionLog.setUpdateTime(now);
taskDefinitionLog.setOperator(loginUser.getId());
taskDefinitionLog.setOperateTime(now);
try {
long code = CodeGenerateUtils.getInstance().genCode();
taskCodeMap.put(taskDefinitionLog.getCode(), code);
taskDefinitionLog.setCode(code);
} catch (CodeGenerateException e) {
logger.error("Task code get error, ", e);
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS, "Error generating task definition code");
return false;
}
taskDefinitionLogList.add(taskDefinitionLog);
}
int insert = taskDefinitionMapper.batchInsert(taskDefinitionLogList);
int logInsert = taskDefinitionLogMapper.batchInsert(taskDefinitionLogList);
if ((logInsert & insert) == 0) {
putMsg(result, Status.CREATE_TASK_DEFINITION_ERROR);
throw new ServiceException(Status.CREATE_TASK_DEFINITION_ERROR);
}
List<ProcessTaskRelation> taskRelationList = dagDataSchedule.getProcessTaskRelationList();
List<ProcessTaskRelationLog> taskRelationLogList = new ArrayList<>();
for (ProcessTaskRelation processTaskRelation : taskRelationList) {
ProcessTaskRelationLog processTaskRelationLog = new ProcessTaskRelationLog(processTaskRelation);
if (taskCodeMap.containsKey(processTaskRelationLog.getPreTaskCode())) {
processTaskRelationLog.setPreTaskCode(taskCodeMap.get(processTaskRelationLog.getPreTaskCode()));
}
if (taskCodeMap.containsKey(processTaskRelationLog.getPostTaskCode())) {
processTaskRelationLog.setPostTaskCode(taskCodeMap.get(processTaskRelationLog.getPostTaskCode()));
}
processTaskRelationLog.setPreTaskVersion(Constants.VERSION_FIRST);
processTaskRelationLog.setPostTaskVersion(Constants.VERSION_FIRST);
taskRelationLogList.add(processTaskRelationLog);
}
if (StringUtils.isNotEmpty(processDefinition.getLocations()) && JSONUtils.checkJsonValid(processDefinition.getLocations())) {
ArrayNode arrayNode = JSONUtils.parseArray(processDefinition.getLocations());
ArrayNode newArrayNode = JSONUtils.createArrayNode();
for (int i = 0; i < arrayNode.size(); i++) {
ObjectNode newObjectNode = newArrayNode.addObject();
JsonNode jsonNode = arrayNode.get(i);
Long taskCode = taskCodeMap.get(jsonNode.get("taskCode").asLong());
if (Objects.nonNull(taskCode)) {
newObjectNode.put("taskCode", taskCode);
newObjectNode.set("x", jsonNode.get("x"));
newObjectNode.set("y", jsonNode.get("y"));
}
}
processDefinition.setLocations(newArrayNode.toString());
}
processDefinition.setCreateTime(new Date());
processDefinition.setUpdateTime(new Date());
Map<String, Object> createDagResult = createDagDefine(loginUser, taskRelationLogList, processDefinition, Lists.newArrayList());
if (Status.SUCCESS.equals(createDagResult.get(Constants.STATUS))) {
putMsg(createDagResult, Status.SUCCESS);
} else {
result.putAll(createDagResult);
throw new ServiceException(Status.IMPORT_PROCESS_DEFINE_ERROR);
}
Schedule schedule = dagDataSchedule.getSchedule();
if (null != schedule) {
ProcessDefinition newProcessDefinition = processDefinitionMapper.queryByCode(processDefinition.getCode());
schedule.setProcessDefinitionCode(newProcessDefinition.getCode());
schedule.setUserId(loginUser.getId());
schedule.setCreateTime(now);
schedule.setUpdateTime(now);
int scheduleInsert = scheduleMapper.insert(schedule);
if (0 == scheduleInsert) {
putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR);
throw new ServiceException(Status.IMPORT_PROCESS_DEFINE_ERROR);
}
}
return true;
}
/**
 * check that the imported data carries the required parts: process definition,
 * task definition list and process task relation list.
 */
private boolean checkImportanceParams(DagDataSchedule dagDataSchedule, Map<String, Object> result) {
if (dagDataSchedule.getProcessDefinition() == null) {
putMsg(result, Status.DATA_IS_NULL, "ProcessDefinition");
return false;
}
if (CollectionUtils.isEmpty(dagDataSchedule.getTaskDefinitionList())) {
putMsg(result, Status.DATA_IS_NULL, "TaskDefinitionList");
return false;
}
if (CollectionUtils.isEmpty(dagDataSchedule.getProcessTaskRelationList())) {
putMsg(result, Status.DATA_IS_NULL, "ProcessTaskRelationList");
return false;
}
return true;
}
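/**
 * find a unique process definition name by appending "(num)" and recursing while a
 * definition with the candidate name exists, e.g. "demo" -> "demo(1)" -> "demo(2)".
 */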
private String recursionProcessDefinitionName(long projectCode, String processDefinitionName, int num) {
ProcessDefinition processDefinition = processDefinitionMapper.queryByDefineName(projectCode, processDefinitionName);
if (processDefinition != null) {
if (num > 1) {
// strip the previous "(num-1)" suffix before appending the next one; its length depends on the digits of num-1
String str = processDefinitionName.substring(0, processDefinitionName.length() - (String.valueOf(num - 1).length() + 2));
processDefinitionName = str + "(" + num + ")";
} else {
processDefinitionName = processDefinition.getName() + "(" + num + ")";
}
} else {
return processDefinitionName;
}
return recursionProcessDefinitionName(projectCode, processDefinitionName, num + 1);
}
/**
* check the process task relation json
*
* @param processTaskRelationJson process task relation json
* @return check result code
*/
@Override
public Map<String, Object> checkProcessNodeList(String processTaskRelationJson) {
Map<String, Object> result = new HashMap<>();
try {
if (processTaskRelationJson == null) {
logger.error("process data is null");
putMsg(result, Status.DATA_IS_NOT_VALID, processTaskRelationJson);
return result;
}
List<ProcessTaskRelation> taskRelationList = JSONUtils.toList(processTaskRelationJson, ProcessTaskRelation.class);
// check whether the task nodes are valid
List<TaskNode> taskNodes = processService.transformTask(taskRelationList, Lists.newArrayList());
if (CollectionUtils.isEmpty(taskNodes)) {
logger.error("process node info is empty");
putMsg(result, Status.PROCESS_DAG_IS_EMPTY);
return result;
}
// check has cycle
if (graphHasCycle(taskNodes)) {
logger.error("process DAG has cycle");
putMsg(result, Status.PROCESS_NODE_HAS_CYCLE);
return result;
}
// check whether each task node's parameters are valid
for (TaskNode taskNode : taskNodes) {
if (!CheckUtils.checkTaskNodeParameters(taskNode)) {
logger.error("task node {} parameter invalid", taskNode.getName());
putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskNode.getName());
return result;
}
// check extra params
CheckUtils.checkOtherParams(taskNode.getExtras());
}
putMsg(result, Status.SUCCESS);
} catch (Exception e) {
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
result.put(Constants.MSG, e.getMessage());
}
return result;
}
/**
* get task node details based on process definition
*
* @param loginUser loginUser
* @param projectCode project code
* @param code process definition code
* @return task node list
*/
@Override
public Map<String, Object> getTaskNodeListByDefinitionCode(User loginUser, long projectCode, long code) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
logger.info("process define not exists");
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
return result;
}
DagData dagData = processService.genDagData(processDefinition);
result.put(Constants.DATA_LIST, dagData.getTaskDefinitionList());
putMsg(result, Status.SUCCESS);
return result;
}
/**
* get task node details map based on process definition
*
* @param loginUser loginUser
* @param projectCode project code
* @param codes define codes
* @return task node list
*/
@Override
public Map<String, Object> getNodeListMapByDefinitionCodes(User loginUser, long projectCode, String codes) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
Set<Long> defineCodeSet = Lists.newArrayList(codes.split(Constants.COMMA)).stream().map(Long::parseLong).collect(Collectors.toSet());
List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryByCodes(defineCodeSet);
if (CollectionUtils.isEmpty(processDefinitionList)) {
logger.info("process definition not exists");
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, codes);
return result;
}
HashMap<Long, Project> userProjects = new HashMap<>(Constants.DEFAULT_HASH_MAP_SIZE);
projectMapper.queryProjectCreatedAndAuthorizedByUserId(loginUser.getId())
.forEach(userProject -> userProjects.put(userProject.getCode(), userProject));
// keep only the process definitions within projects the user can access
List<ProcessDefinition> processDefinitionListInProject = processDefinitionList.stream()
.filter(o -> userProjects.containsKey(o.getProjectCode())).collect(Collectors.toList());
if (CollectionUtils.isEmpty(processDefinitionListInProject)) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, codes);
return result;
}
Map<Long, List<TaskDefinition>> taskNodeMap = new HashMap<>();
for (ProcessDefinition processDefinition : processDefinitionListInProject) {
DagData dagData = processService.genDagData(processDefinition);
taskNodeMap.put(processDefinition.getCode(), dagData.getTaskDefinitionList());
}
result.put(Constants.DATA_LIST, taskNodeMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query process definition all by project code
*
* @param loginUser loginUser
* @param projectCode project code
* @return process definitions in the project
*/
@Override
public Map<String, Object> queryAllProcessDefinitionByProjectCode(User loginUser, long projectCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
List<ProcessDefinition> processDefinitions = processDefinitionMapper.queryAllDefinitionList(projectCode);
List<DagData> dagDataList = processDefinitions.stream().map(processService::genDagData).collect(Collectors.toList());
result.put(Constants.DATA_LIST, dagDataList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* Encapsulates the TreeView structure
*
* @param projectCode project code
* @param code process definition code
* @param limit limit
* @return tree view json data
*/
@Override
public Map<String, Object> viewTree(long projectCode, long code, Integer limit) {
Map<String, Object> result = new HashMap<>();
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (null == processDefinition || projectCode != processDefinition.getProjectCode()) {
logger.info("process define not exists");
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
return result;
}
DAG<String, TaskNode, TaskNodeRelation> dag = processService.genDagGraph(processDefinition);
// nodes that are running
Map<String, List<TreeViewDto>> runningNodeMap = new ConcurrentHashMap<>();
// nodes that are waiting to run
Map<String, List<TreeViewDto>> waitingRunningNodeMap = new ConcurrentHashMap<>();
// List of process instances
List<ProcessInstance> processInstanceList = processInstanceService.queryByProcessDefineCode(code, limit);
processInstanceList.forEach(processInstance -> processInstance.setDuration(DateUtils.format2Duration(processInstance.getStartTime(), processInstance.getEndTime())));
List<TaskDefinitionLog> taskDefinitionList = processService.genTaskDefineList(processTaskRelationMapper.queryByProcessCode(processDefinition.getProjectCode(), processDefinition.getCode()));
Map<Long, TaskDefinitionLog> taskDefinitionMap = taskDefinitionList.stream()
.collect(Collectors.toMap(TaskDefinitionLog::getCode, taskDefinitionLog -> taskDefinitionLog));
if (limit > processInstanceList.size()) {
limit = processInstanceList.size();
}
TreeViewDto parentTreeViewDto = new TreeViewDto();
parentTreeViewDto.setName("DAG");
parentTreeViewDto.setType("");
parentTreeViewDto.setCode(0L);
// collect the instances of this process definition, since the TreeView is scoped to a single definition
for (int i = limit - 1; i >= 0; i--) {
ProcessInstance processInstance = processInstanceList.get(i);
Date endTime = processInstance.getEndTime() == null ? new Date() : processInstance.getEndTime();
parentTreeViewDto.getInstances().add(new Instance(processInstance.getId(), processInstance.getName(), processInstance.getProcessDefinitionCode(),
"", processInstance.getState().toString(), processInstance.getStartTime(), endTime, processInstance.getHost(),
DateUtils.format2Readable(endTime.getTime() - processInstance.getStartTime().getTime())));
}
List<TreeViewDto> parentTreeViewDtoList = new ArrayList<>();
parentTreeViewDtoList.add(parentTreeViewDto);
// encapsulate the task instances, starting from the begin nodes of the DAG
for (String startNode : dag.getBeginNode()) {
runningNodeMap.put(startNode, parentTreeViewDtoList);
}
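// walk the DAG level by level: drain the currently reachable nodes, queue their
// successors in waitingRunningNodeMap, and stop once no successors remain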
while (Stopper.isRunning()) {
Set<String> postNodeList;
Iterator<Map.Entry<String, List<TreeViewDto>>> iter = runningNodeMap.entrySet().iterator();
while (iter.hasNext()) {
Map.Entry<String, List<TreeViewDto>> en = iter.next();
String nodeCode = en.getKey();
parentTreeViewDtoList = en.getValue();
TreeViewDto treeViewDto = new TreeViewDto();
TaskNode taskNode = dag.getNode(nodeCode);
treeViewDto.setType(taskNode.getType());
treeViewDto.setCode(taskNode.getCode());
treeViewDto.setName(taskNode.getName());
//set treeViewDto instances
for (int i = limit - 1; i >= 0; i--) {
ProcessInstance processInstance = processInstanceList.get(i);
TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndCode(processInstance.getId(), Long.parseLong(nodeCode));
if (taskInstance == null) {
treeViewDto.getInstances().add(new Instance(-1, "not running", 0, "null"));
} else {
Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime();
Date endTime = taskInstance.getEndTime() == null ? new Date() : taskInstance.getEndTime();
long subProcessCode = 0L;
// if the task is a sub-process, resolve its sub-process definition code; otherwise it stays 0
if (taskInstance.isSubProcess()) {
TaskDefinition taskDefinition = taskDefinitionMap.get(taskInstance.getTaskCode());
// definition codes are longs, so parse with Long.parseLong to avoid overflow
subProcessCode = Long.parseLong(JSONUtils.parseObject(
taskDefinition.getTaskParams()).path(CMD_PARAM_SUB_PROCESS_DEFINE_CODE).asText());
}
treeViewDto.getInstances().add(new Instance(taskInstance.getId(), taskInstance.getName(), taskInstance.getTaskCode(),
taskInstance.getTaskType(), taskInstance.getState().toString(), taskInstance.getStartTime(), taskInstance.getEndTime(),
taskInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - startTime.getTime()), subProcessCode));
}
}
for (TreeViewDto pTreeViewDto : parentTreeViewDtoList) {
pTreeViewDto.getChildren().add(treeViewDto);
}
postNodeList = dag.getSubsequentNodes(nodeCode);
if (CollectionUtils.isNotEmpty(postNodeList)) {
for (String nextNodeCode : postNodeList) {
List<TreeViewDto> treeViewDtoList = waitingRunningNodeMap.get(nextNodeCode);
if (CollectionUtils.isEmpty(treeViewDtoList)) {
treeViewDtoList = new ArrayList<>();
}
treeViewDtoList.add(treeViewDto);
waitingRunningNodeMap.put(nextNodeCode, treeViewDtoList);
}
}
runningNodeMap.remove(nodeCode);
}
if (waitingRunningNodeMap.size() == 0) {
break;
} else {
runningNodeMap.putAll(waitingRunningNodeMap);
waitingRunningNodeMap.clear();
}
}
result.put(Constants.DATA_LIST, parentTreeViewDto);
result.put(Constants.STATUS, Status.SUCCESS);
result.put(Constants.MSG, Status.SUCCESS.getMsg());
return result;
}
/**
* whether the graph has a cycle
*
* @param taskNodeResponseList task node response list
* @return if graph has cycle flag
*/
private boolean graphHasCycle(List<TaskNode> taskNodeResponseList) {
DAG<String, TaskNode, String> graph = new DAG<>();
// Fill the vertices
for (TaskNode taskNodeResponse : taskNodeResponseList) {
graph.addNode(Long.toString(taskNodeResponse.getCode()), taskNodeResponse);
}
// Fill edge relations
for (TaskNode taskNodeResponse : taskNodeResponseList) {
List<String> preTasks = JSONUtils.toList(taskNodeResponse.getPreTasks(), String.class);
if (CollectionUtils.isNotEmpty(preTasks)) {
for (String preTask : preTasks) {
if (!graph.addEdge(preTask, Long.toString(taskNodeResponse.getCode()))) {
return true;
}
}
}
}
return graph.hasCycle();
}
/**
* batch copy process definition
*
* @param loginUser loginUser
* @param projectCode projectCode
* @param codes processDefinitionCodes
* @param targetProjectCode targetProjectCode
*/
@Override
public Map<String, Object> batchCopyProcessDefinition(User loginUser,
long projectCode,
String codes,
long targetProjectCode) {
Map<String, Object> result = checkParams(loginUser, projectCode, codes, targetProjectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
List<String> failedProcessList = new ArrayList<>();
doBatchOperateProcessDefinition(loginUser, targetProjectCode, failedProcessList, codes, result, true);
if (result.get(Constants.STATUS) == Status.NOT_SUPPORT_COPY_TASK_TYPE) {
return result;
}
checkBatchOperateResult(projectCode, targetProjectCode, result, failedProcessList, true);
return result;
}
/**
* batch move process definition
* Note: this method is slated for removal.
* @param loginUser loginUser
* @param projectCode projectCode
* @param codes processDefinitionCodes
* @param targetProjectCode targetProjectCode
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> batchMoveProcessDefinition(User loginUser,
long projectCode,
String codes,
long targetProjectCode) {
Map<String, Object> result = checkParams(loginUser, projectCode, codes, targetProjectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (projectCode == targetProjectCode) {
return result;
}
List<String> failedProcessList = new ArrayList<>();
doBatchOperateProcessDefinition(loginUser, targetProjectCode, failedProcessList, codes, result, false);
checkBatchOperateResult(projectCode, targetProjectCode, result, failedProcessList, false);
return result;
}
private Map<String, Object> checkParams(User loginUser,
long projectCode,
String processDefinitionCodes,
long targetProjectCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (org.apache.commons.lang.StringUtils.isEmpty(processDefinitionCodes)) {
putMsg(result, Status.PROCESS_DEFINITION_CODES_IS_EMPTY, processDefinitionCodes);
return result;
}
if (projectCode != targetProjectCode) {
Project targetProject = projectMapper.queryByCode(targetProjectCode);
//check user access for project
Map<String, Object> targetResult = projectService.checkProjectAndAuth(loginUser, targetProject, targetProjectCode);
if (targetResult.get(Constants.STATUS) != Status.SUCCESS) {
return targetResult;
}
}
return result;
}
private void doBatchOperateProcessDefinition(User loginUser,
long targetProjectCode,
List<String> failedProcessList,
String processDefinitionCodes,
Map<String, Object> result,
boolean isCopy) {
Set<Long> definitionCodes = Arrays.stream(processDefinitionCodes.split(Constants.COMMA)).map(Long::parseLong).collect(Collectors.toSet());
List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryByCodes(definitionCodes);
Set<Long> queryCodes = processDefinitionList.stream().map(ProcessDefinition::getCode).collect(Collectors.toSet());
// definitionCodes - queryCodes
Set<Long> diffCode = definitionCodes.stream().filter(code -> !queryCodes.contains(code)).collect(Collectors.toSet());
diffCode.forEach(code -> failedProcessList.add(code + "[null]"));
for (ProcessDefinition processDefinition : processDefinitionList) {
List<ProcessTaskRelation> processTaskRelations =
processTaskRelationMapper.queryByProcessCode(processDefinition.getProjectCode(), processDefinition.getCode());
List<ProcessTaskRelationLog> taskRelationList = processTaskRelations.stream().map(ProcessTaskRelationLog::new).collect(Collectors.toList());
processDefinition.setProjectCode(targetProjectCode);
if (isCopy) {
List<TaskDefinitionLog> taskDefinitionLogs = processService.genTaskDefineList(processTaskRelations);
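                // old task code -> newly generated task code; used below to rewrite relations and DAG locations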
Map<Long, Long> taskCodeMap = new HashMap<>();
for (TaskDefinitionLog taskDefinitionLog : taskDefinitionLogs) {
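                    // these task types reference other task/process codes, so they cannot be safely copied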
if (TaskType.CONDITIONS.getDesc().equals(taskDefinitionLog.getTaskType())
|| TaskType.SWITCH.getDesc().equals(taskDefinitionLog.getTaskType())
|| TaskType.SUB_PROCESS.getDesc().equals(taskDefinitionLog.getTaskType())
|| TaskType.DEPENDENT.getDesc().equals(taskDefinitionLog.getTaskType())) {
putMsg(result, Status.NOT_SUPPORT_COPY_TASK_TYPE, taskDefinitionLog.getTaskType());
return;
}
try {
long taskCode = CodeGenerateUtils.getInstance().genCode();
taskCodeMap.put(taskDefinitionLog.getCode(), taskCode);
taskDefinitionLog.setCode(taskCode);
} catch (CodeGenerateException e) {
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS);
throw new ServiceException(Status.INTERNAL_SERVER_ERROR_ARGS);
}
taskDefinitionLog.setProjectCode(targetProjectCode);
taskDefinitionLog.setVersion(0);
taskDefinitionLog.setName(taskDefinitionLog.getName() + "_copy_" + DateUtils.getCurrentTimeStamp());
}
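                // remap the pre/post task codes of the copied relations to the newly generated task codes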
for (ProcessTaskRelationLog processTaskRelationLog : taskRelationList) {
if (processTaskRelationLog.getPreTaskCode() > 0) {
processTaskRelationLog.setPreTaskCode(taskCodeMap.get(processTaskRelationLog.getPreTaskCode()));
}
if (processTaskRelationLog.getPostTaskCode() > 0) {
processTaskRelationLog.setPostTaskCode(taskCodeMap.get(processTaskRelationLog.getPostTaskCode()));
}
}
try {
processDefinition.setCode(CodeGenerateUtils.getInstance().genCode());
} catch (CodeGenerateException e) {
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS);
throw new ServiceException(Status.INTERNAL_SERVER_ERROR_ARGS);
}
processDefinition.setId(0);
processDefinition.setUserId(loginUser.getId());
processDefinition.setName(processDefinition.getName() + "_copy_" + DateUtils.getCurrentTimeStamp());
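                // rewrite the task codes embedded in the DAG locations so the copied layout points at the new tasks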
if (StringUtils.isNotBlank(processDefinition.getLocations())) {
ArrayNode jsonNodes = JSONUtils.parseArray(processDefinition.getLocations());
for (int i = 0; i < jsonNodes.size(); i++) {
ObjectNode node = (ObjectNode) jsonNodes.path(i);
node.put("taskCode", taskCodeMap.get(node.get("taskCode").asLong()));
jsonNodes.set(i, node);
}
processDefinition.setLocations(JSONUtils.toJsonString(jsonNodes));
}
try {
result.putAll(createDagDefine(loginUser, taskRelationList, processDefinition, taskDefinitionLogs));
} catch (Exception e) {
putMsg(result, Status.COPY_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.COPY_PROCESS_DEFINITION_ERROR);
}
} else {
try {
result.putAll(updateDagDefine(loginUser, taskRelationList, processDefinition, null, Lists.newArrayList()));
} catch (Exception e) {
putMsg(result, Status.MOVE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.MOVE_PROCESS_DEFINITION_ERROR);
}
}
if (result.get(Constants.STATUS) != Status.SUCCESS) {
failedProcessList.add(processDefinition.getCode() + "[" + processDefinition.getName() + "]");
}
}
}
/**
* switch the defined process definition version
*
* @param loginUser login user
* @param projectCode project code
* @param code process definition code
* @param version the version user want to switch
* @return switch process definition version result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> switchProcessDefinitionVersion(User loginUser, long projectCode, long code, int version) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (Objects.isNull(processDefinition) || projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_ERROR, code);
return result;
}
ProcessDefinitionLog processDefinitionLog = processDefinitionLogMapper.queryByDefinitionCodeAndVersion(code, version);
if (Objects.isNull(processDefinitionLog)) {
putMsg(result, Status.SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_VERSION_ERROR, processDefinition.getCode(), version);
return result;
}
int switchVersion = processService.switchVersion(processDefinition, processDefinitionLog);
if (switchVersion <= 0) {
putMsg(result, Status.SWITCH_PROCESS_DEFINITION_VERSION_ERROR);
throw new ServiceException(Status.SWITCH_PROCESS_DEFINITION_VERSION_ERROR);
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* check batch operate result
*
* @param srcProjectCode srcProjectCode
* @param targetProjectCode targetProjectCode
* @param result result
* @param failedProcessList failedProcessList
* @param isCopy isCopy
*/
private void checkBatchOperateResult(long srcProjectCode, long targetProjectCode,
Map<String, Object> result, List<String> failedProcessList, boolean isCopy) {
if (!failedProcessList.isEmpty()) {
if (isCopy) {
putMsg(result, Status.COPY_PROCESS_DEFINITION_ERROR, srcProjectCode, targetProjectCode, String.join(",", failedProcessList));
} else {
putMsg(result, Status.MOVE_PROCESS_DEFINITION_ERROR, srcProjectCode, targetProjectCode, String.join(",", failedProcessList));
}
} else {
putMsg(result, Status.SUCCESS);
}
}
/**
* query the pagination versions info by one certain process definition code
*
* @param loginUser login user info to check auth
* @param projectCode project code
* @param pageNo page number
* @param pageSize page size
* @param code process definition code
* @return the pagination process definition versions info of the certain process definition
*/
@Override
public Result queryProcessDefinitionVersions(User loginUser, long projectCode, int pageNo, int pageSize, long code) {
Result result = new Result();
Project project = projectMapper.queryByCode(projectCode);
// check user access for project
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectCode);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
putMsg(result, resultStatus);
return result;
}
PageInfo<ProcessDefinitionLog> pageInfo = new PageInfo<>(pageNo, pageSize);
Page<ProcessDefinitionLog> page = new Page<>(pageNo, pageSize);
IPage<ProcessDefinitionLog> processDefinitionVersionsPaging = processDefinitionLogMapper.queryProcessDefinitionVersionsPaging(page, code, projectCode);
List<ProcessDefinitionLog> processDefinitionLogs = processDefinitionVersionsPaging.getRecords();
pageInfo.setTotalList(processDefinitionLogs);
pageInfo.setTotal((int) processDefinitionVersionsPaging.getTotal());
result.setData(pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete one certain process definition by version number and process definition code
*
* @param loginUser login user info to check auth
* @param projectCode project code
* @param code process definition code
* @param version version number
* @return delete result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> deleteProcessDefinitionVersion(User loginUser, long projectCode, long code, int version) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
} else {
if (processDefinition.getVersion() == version) {
putMsg(result, Status.MAIN_TABLE_USING_VERSION);
return result;
}
int deleteLog = processDefinitionLogMapper.deleteByProcessDefinitionCodeAndVersion(code, version);
int deleteRelationLog = processTaskRelationLogMapper.deleteByCode(code, version);
if (deleteLog == 0 || deleteRelationLog == 0) {
putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR);
throw new ServiceException(Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR);
}
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* create empty process definition
*
* @param loginUser login user
* @param projectCode project code
* @param name process definition name
* @param description description
* @param globalParams globalParams
* @param timeout timeout
* @param tenantCode tenantCode
* @param scheduleJson scheduleJson
* @return process definition code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> createEmptyProcessDefinition(User loginUser,
long projectCode,
String name,
String description,
String globalParams,
int timeout,
String tenantCode,
String scheduleJson,
ProcessExecutionTypeEnum executionType) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
// check whether the new process define name exist
ProcessDefinition definition = processDefinitionMapper.verifyByDefineName(project.getCode(), name);
if (definition != null) {
putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name);
return result;
}
int tenantId = -1;
if (!Constants.DEFAULT.equals(tenantCode)) {
Tenant tenant = tenantMapper.queryByTenantCode(tenantCode);
if (tenant == null) {
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
tenantId = tenant.getId();
}
long processDefinitionCode;
try {
processDefinitionCode = CodeGenerateUtils.getInstance().genCode();
} catch (CodeGenerateException e) {
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS);
return result;
}
ProcessDefinition processDefinition = new ProcessDefinition(projectCode, name, processDefinitionCode, description,
globalParams, "", timeout, loginUser.getId(), tenantId);
processDefinition.setExecutionType(executionType);
result = createEmptyDagDefine(loginUser, processDefinition);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (StringUtils.isBlank(scheduleJson)) {
return result;
}
// save dag schedule
Map<String, Object> scheduleResult = createDagSchedule(loginUser, processDefinition, scheduleJson);
if (scheduleResult.get(Constants.STATUS) != Status.SUCCESS) {
Status scheduleResultStatus = (Status) scheduleResult.get(Constants.STATUS);
putMsg(result, scheduleResultStatus);
throw new ServiceException(scheduleResultStatus);
}
return result;
}
private Map<String, Object> createEmptyDagDefine(User loginUser, ProcessDefinition processDefinition) {
Map<String, Object> result = new HashMap<>();
int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE);
if (insertVersion == 0) {
putMsg(result, Status.CREATE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.CREATE_PROCESS_DEFINITION_ERROR);
}
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefinition);
return result;
}
private Map<String, Object> createDagSchedule(User loginUser, ProcessDefinition processDefinition, String scheduleJson) {
Map<String, Object> result = new HashMap<>();
Schedule scheduleObj = JSONUtils.parseObject(scheduleJson, Schedule.class);
if (scheduleObj == null) {
putMsg(result, Status.DATA_IS_NOT_VALID, scheduleJson);
throw new ServiceException(Status.DATA_IS_NOT_VALID);
}
Date now = new Date();
scheduleObj.setProcessDefinitionCode(processDefinition.getCode());
if (DateUtils.differSec(scheduleObj.getStartTime(), scheduleObj.getEndTime()) == 0) {
logger.warn("The start time must not be the same as the end");
putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME);
return result;
}
if (!org.quartz.CronExpression.isValidExpression(scheduleObj.getCrontab())) {
logger.error("{} verify failure", scheduleObj.getCrontab());
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleObj.getCrontab());
return result;
}
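        // fill any unset schedule fields with sensible defaults before persisting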
scheduleObj.setWarningType(scheduleObj.getWarningType() == null ? WarningType.NONE : scheduleObj.getWarningType());
scheduleObj.setWarningGroupId(scheduleObj.getWarningGroupId() == 0 ? 1 : scheduleObj.getWarningGroupId());
scheduleObj.setFailureStrategy(scheduleObj.getFailureStrategy() == null ? FailureStrategy.CONTINUE : scheduleObj.getFailureStrategy());
scheduleObj.setCreateTime(now);
scheduleObj.setUpdateTime(now);
scheduleObj.setUserId(loginUser.getId());
scheduleObj.setReleaseState(ReleaseState.OFFLINE);
scheduleObj.setProcessInstancePriority(scheduleObj.getProcessInstancePriority() == null ? Priority.MEDIUM : scheduleObj.getProcessInstancePriority());
scheduleObj.setWorkerGroup(scheduleObj.getWorkerGroup() == null ? "default" : scheduleObj.getWorkerGroup());
scheduleObj.setEnvironmentCode(scheduleObj.getEnvironmentCode() == null ? -1 : scheduleObj.getEnvironmentCode());
scheduleMapper.insert(scheduleObj);
putMsg(result, Status.SUCCESS);
result.put("scheduleId", scheduleObj.getId());
return result;
}
/**
* update process definition basic info
*
* @param loginUser login user
* @param projectCode project code
* @param name process definition name
* @param code process definition code
* @param description description
* @param globalParams globalParams
* @param timeout timeout
* @param tenantCode tenantCode
* @param scheduleJson scheduleJson
* @param executionType executionType
* @return update result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> updateProcessDefinitionBasicInfo(User loginUser,
long projectCode,
String name,
long code,
String description,
String globalParams,
int timeout,
String tenantCode,
String scheduleJson,
ProcessExecutionTypeEnum executionType) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
int tenantId = -1;
if (!Constants.DEFAULT.equals(tenantCode)) {
Tenant tenant = tenantMapper.queryByTenantCode(tenantCode);
if (tenant == null) {
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
tenantId = tenant.getId();
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
// check process definition exists
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
return result;
}
if (processDefinition.getReleaseState() == ReleaseState.ONLINE) {
// online can not permit edit
putMsg(result, Status.PROCESS_DEFINE_NOT_ALLOWED_EDIT, processDefinition.getName());
return result;
}
if (!name.equals(processDefinition.getName())) {
// check whether the new process define name exist
ProcessDefinition definition = processDefinitionMapper.verifyByDefineName(project.getCode(), name);
if (definition != null) {
putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name);
return result;
}
}
ProcessDefinition processDefinitionDeepCopy = JSONUtils.parseObject(JSONUtils.toJsonString(processDefinition), ProcessDefinition.class);
processDefinition.set(projectCode, name, description, globalParams, "", timeout, tenantId);
processDefinition.setExecutionType(executionType);
List<ProcessTaskRelationLog> taskRelationList = processTaskRelationLogMapper.queryByProcessCodeAndVersion(processDefinition.getCode(), processDefinition.getVersion());
result = updateDagDefine(loginUser, taskRelationList, processDefinition, processDefinitionDeepCopy, Lists.newArrayList());
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (StringUtils.isBlank(scheduleJson)) {
return result;
}
// update dag schedule
Map<String, Object> scheduleResult = updateDagSchedule(loginUser, projectCode, code, scheduleJson);
if (scheduleResult.get(Constants.STATUS) != Status.SUCCESS) {
Status scheduleResultStatus = (Status) scheduleResult.get(Constants.STATUS);
putMsg(result, scheduleResultStatus);
throw new ServiceException(scheduleResultStatus);
}
return result;
}
private Map<String, Object> updateDagSchedule(User loginUser,
long projectCode,
long processDefinitionCode,
String scheduleJson) {
Map<String, Object> result = new HashMap<>();
Schedule schedule = JSONUtils.parseObject(scheduleJson, Schedule.class);
if (schedule == null) {
putMsg(result, Status.DATA_IS_NOT_VALID, scheduleJson);
throw new ServiceException(Status.DATA_IS_NOT_VALID);
}
// set default value
FailureStrategy failureStrategy = schedule.getFailureStrategy() == null ? FailureStrategy.CONTINUE : schedule.getFailureStrategy();
WarningType warningType = schedule.getWarningType() == null ? WarningType.NONE : schedule.getWarningType();
Priority processInstancePriority = schedule.getProcessInstancePriority() == null ? Priority.MEDIUM : schedule.getProcessInstancePriority();
int warningGroupId = schedule.getWarningGroupId() == 0 ? 1 : schedule.getWarningGroupId();
String workerGroup = schedule.getWorkerGroup() == null ? "default" : schedule.getWorkerGroup();
long environmentCode = schedule.getEnvironmentCode() == null ? -1 : schedule.getEnvironmentCode();
ScheduleParam param = new ScheduleParam();
param.setStartTime(schedule.getStartTime());
param.setEndTime(schedule.getEndTime());
param.setCrontab(schedule.getCrontab());
param.setTimezoneId(schedule.getTimezoneId());
return schedulerService.updateScheduleByProcessDefinitionCode(
loginUser,
projectCode,
processDefinitionCode,
JSONUtils.toJsonString(param),
warningType,
warningGroupId,
failureStrategy,
processInstancePriority,
workerGroup,
environmentCode);
}
/**
* release process definition and schedule
*
* @param loginUser login user
* @param projectCode project code
* @param code process definition code
* @param releaseState releaseState
* @return update result code
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> releaseWorkflowAndSchedule(User loginUser, long projectCode, long code, ReleaseState releaseState) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
// check state
if (null == releaseState) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE);
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
return result;
}
Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(code);
if (scheduleObj == null) {
putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, "processDefinitionCode:" + code);
return result;
}
switch (releaseState) {
case ONLINE:
List<ProcessTaskRelation> relationList = processService.findRelationByCode(code, processDefinition.getVersion());
if (CollectionUtils.isEmpty(relationList)) {
putMsg(result, Status.PROCESS_DAG_IS_EMPTY);
return result;
}
processDefinition.setReleaseState(releaseState);
processDefinitionMapper.updateById(processDefinition);
scheduleObj.setReleaseState(ReleaseState.ONLINE);
scheduleMapper.updateById(scheduleObj);
break;
case OFFLINE:
processDefinition.setReleaseState(releaseState);
int updateProcess = processDefinitionMapper.updateById(processDefinition);
if (updateProcess > 0) {
logger.info("set schedule offline, project code: {}, schedule id: {}, process definition code: {}", projectCode, scheduleObj.getId(), code);
// set status
scheduleObj.setReleaseState(ReleaseState.OFFLINE);
int updateSchedule = scheduleMapper.updateById(scheduleObj);
if (updateSchedule == 0) {
putMsg(result, Status.OFFLINE_SCHEDULE_ERROR);
throw new ServiceException(Status.OFFLINE_SCHEDULE_ERROR);
}
schedulerService.deleteSchedule(project.getId(), scheduleObj.getId());
}
break;
default:
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE);
return result;
}
putMsg(result, Status.SUCCESS);
return result;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,074 | [Feature][GitHub] Add stale mechanism to auto-mark issues and PRs as stale | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
We should add a stale mechanism that automatically marks inactive issues and PRs as stale, to keep their number under control and to remind us which ones need to be picked up again.
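A minimal sketch of what such a configuration could look like, using the probot-stale `.github/stale.yml` format (the day counts, labels, and messages below are illustrative assumptions, not the settings the project finally adopted):

```yaml
# Days of inactivity before an issue or PR is marked as stale
daysUntilStale: 60
# Days of further inactivity before a stale issue or PR is closed
daysUntilClose: 7
# Labels that exempt an issue or PR from being marked stale (assumed label names)
exemptLabels:
  - pinned
  - security
# Label applied when marking as stale
staleLabel: stale
# Comment posted when marking as stale
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity. It will be closed if no further activity occurs.
# Comment posted when closing
closeComment: >
  This issue has been closed because it has not received any further activity.
```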
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8074 | https://github.com/apache/dolphinscheduler/pull/8075 | 90ea6ebb6f6e5db0e0e444c32da8ccaa180bd459 | 6b9198acfcf8b61f554295edd0e09d07d674bc8e | "2022-01-16T16:00:05Z" | java | "2022-01-16T16:24:10Z" | .github/stale.yml | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,986 | [Bug] [dolphinscheduler-master] After deleting the running workflow, the master keeps flooding the error log | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![image](https://user-images.githubusercontent.com/95271106/149263375-61e11064-1c79-4df5-b6a8-69cf3d311cde.png)
### What you expected to happen
.
### How to reproduce
.
### Anything else
_No response_
### Version
2.0.2
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7986 | https://github.com/apache/dolphinscheduler/pull/8065 | 6b9198acfcf8b61f554295edd0e09d07d674bc8e | c0d69125eb1c8ddb2993697533e9b490f2611cc1 | "2022-01-13T03:59:22Z" | java | "2022-01-17T02:29:59Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessInstanceServiceImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.common.Constants.DATA_LIST;
import static org.apache.dolphinscheduler.common.Constants.DEPENDENT_SPLIT;
import static org.apache.dolphinscheduler.common.Constants.GLOBAL_PARAMS;
import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS;
import static org.apache.dolphinscheduler.common.Constants.PROCESS_INSTANCE_STATE;
import static org.apache.dolphinscheduler.common.Constants.TASK_LIST;
import org.apache.dolphinscheduler.api.dto.gantt.GanttDto;
import org.apache.dolphinscheduler.api.dto.gantt.Task;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.service.ExecutorService;
import org.apache.dolphinscheduler.api.service.LoggerService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.service.ProcessInstanceService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.service.UsersService;
import org.apache.dolphinscheduler.api.utils.CheckUtils;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DependResult;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelationLog;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.TaskDefinition;
import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionLogMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionLogMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* process instance service impl
*/
@Service
public class ProcessInstanceServiceImpl extends BaseServiceImpl implements ProcessInstanceService {
public static final String TASK_TYPE = "taskType";
public static final String LOCAL_PARAMS_LIST = "localParamsList";
@Autowired
ProjectMapper projectMapper;
@Autowired
ProjectService projectService;
@Autowired
ProcessService processService;
@Autowired
ProcessInstanceMapper processInstanceMapper;
@Autowired
ProcessDefinitionMapper processDefineMapper;
@Autowired
ProcessDefinitionService processDefinitionService;
@Autowired
ExecutorService execService;
@Autowired
TaskInstanceMapper taskInstanceMapper;
@Autowired
LoggerService loggerService;
@Autowired
ProcessDefinitionLogMapper processDefinitionLogMapper;
@Autowired
TaskDefinitionLogMapper taskDefinitionLogMapper;
@Autowired
UsersService usersService;
@Autowired
private TenantMapper tenantMapper;
@Autowired
TaskDefinitionMapper taskDefinitionMapper;
/**
     * return the top n longest-running SUCCESS process instances, ordered by running time, that started between startTime and endTime
*/
@Override
public Map<String, Object> queryTopNLongestRunningProcessInstance(User loginUser, long projectCode, int size, String startTime, String endTime) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (0 > size) {
putMsg(result, Status.NEGTIVE_SIZE_NUMBER_ERROR, size);
return result;
}
if (Objects.isNull(startTime)) {
putMsg(result, Status.DATA_IS_NULL, Constants.START_TIME);
return result;
}
Date start = DateUtils.stringToDate(startTime);
if (Objects.isNull(endTime)) {
putMsg(result, Status.DATA_IS_NULL, Constants.END_TIME);
return result;
}
Date end = DateUtils.stringToDate(endTime);
if (start == null || end == null) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.START_END_DATE);
return result;
}
if (start.getTime() > end.getTime()) {
putMsg(result, Status.START_TIME_BIGGER_THAN_END_TIME_ERROR, startTime, endTime);
return result;
}
List<ProcessInstance> processInstances = processInstanceMapper.queryTopNProcessInstance(size, start, end, ExecutionStatus.SUCCESS, projectCode);
result.put(DATA_LIST, processInstances);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query process instance by id
*
* @param loginUser login user
* @param projectCode project code
* @param processId process instance id
* @return process instance detail
*/
@Override
public Map<String, Object> queryProcessInstanceById(User loginUser, long projectCode, Integer processId) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId);
ProcessDefinition processDefinition = processService.findProcessDefinition(processInstance.getProcessDefinitionCode(),
processInstance.getProcessDefinitionVersion());
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processId);
} else {
processInstance.setWarningGroupId(processDefinition.getWarningGroupId());
processInstance.setLocations(processDefinition.getLocations());
processInstance.setDagData(processService.genDagData(processDefinition));
result.put(DATA_LIST, processInstance);
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* paging query process instance list, filtering according to project, process definition, time range, keyword, process status
*
* @param loginUser login user
* @param projectCode project code
* @param processDefineCode process definition code
* @param pageNo page number
* @param pageSize page size
* @param searchVal search value
* @param stateType state type
* @param host host
* @param startDate start time
* @param endDate end time
* @return process instance list
*/
@Override
public Result queryProcessInstanceList(User loginUser, long projectCode, long processDefineCode, String startDate, String endDate, String searchVal, String executorName,
ExecutionStatus stateType, String host, Integer pageNo, Integer pageSize) {
Result result = new Result();
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectCode);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
            putMsg(result, resultEnum);
return result;
}
int[] statusArray = null;
// filter by state
if (stateType != null) {
statusArray = new int[]{stateType.ordinal()};
}
Map<String, Object> checkAndParseDateResult = checkAndParseDateParameters(startDate, endDate);
resultEnum = (Status) checkAndParseDateResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
            putMsg(result, resultEnum);
return result;
}
Date start = (Date) checkAndParseDateResult.get(Constants.START_TIME);
Date end = (Date) checkAndParseDateResult.get(Constants.END_TIME);
Page<ProcessInstance> page = new Page<>(pageNo, pageSize);
PageInfo<ProcessInstance> pageInfo = new PageInfo<>(pageNo, pageSize);
int executorId = usersService.getUserIdByName(executorName);
IPage<ProcessInstance> processInstanceList = processInstanceMapper.queryProcessInstanceListPaging(page,
project.getCode(), processDefineCode, searchVal, executorId, statusArray, host, start, end);
List<ProcessInstance> processInstances = processInstanceList.getRecords();
List<Integer> userIds = Collections.emptyList();
if (CollectionUtils.isNotEmpty(processInstances)) {
userIds = processInstances.stream().map(ProcessInstance::getExecutorId).collect(Collectors.toList());
}
List<User> users = usersService.queryUser(userIds);
Map<Integer, User> idToUserMap = Collections.emptyMap();
if (CollectionUtils.isNotEmpty(users)) {
idToUserMap = users.stream().collect(Collectors.toMap(User::getId, Function.identity()));
}
for (ProcessInstance processInstance : processInstances) {
processInstance.setDuration(DateUtils.format2Duration(processInstance.getStartTime(), processInstance.getEndTime()));
User executor = idToUserMap.get(processInstance.getExecutorId());
if (null != executor) {
processInstance.setExecutorName(executor.getUserName());
}
}
pageInfo.setTotal((int) processInstanceList.getTotal());
pageInfo.setTotalList(processInstances);
result.setData(pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query task list by process instance id
*
* @param loginUser login user
* @param projectCode project code
* @param processId process instance id
* @return task list for the process instance
* @throws IOException io exception
*/
@Override
public Map<String, Object> queryTaskListByProcessId(User loginUser, long projectCode, Integer processId) throws IOException {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId);
ProcessDefinition processDefinition = processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode());
if (processDefinition != null && projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processId);
return result;
}
List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(processId);
addDependResultForTaskList(taskInstanceList);
Map<String, Object> resultMap = new HashMap<>();
resultMap.put(PROCESS_INSTANCE_STATE, processInstance.getState().toString());
resultMap.put(TASK_LIST, taskInstanceList);
result.put(DATA_LIST, resultMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* add dependent result for dependent task
*/
private void addDependResultForTaskList(List<TaskInstance> taskInstanceList) throws IOException {
for (TaskInstance taskInstance : taskInstanceList) {
if (TaskType.DEPENDENT.getDesc().equalsIgnoreCase(taskInstance.getTaskType())) {
Result<String> logResult = loggerService.queryLog(
taskInstance.getId(), Constants.LOG_QUERY_SKIP_LINE_NUMBER, Constants.LOG_QUERY_LIMIT);
if (logResult.getCode() == Status.SUCCESS.ordinal()) {
String log = logResult.getData();
Map<String, DependResult> resultMap = parseLogForDependentResult(log);
taskInstance.setDependentResult(JSONUtils.toJsonString(resultMap));
}
}
}
}
@Override
public Map<String, DependResult> parseLogForDependentResult(String log) throws IOException {
Map<String, DependResult> resultMap = new HashMap<>();
if (StringUtils.isEmpty(log)) {
return resultMap;
}
BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes(
StandardCharsets.UTF_8)), StandardCharsets.UTF_8));
String line;
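        // dependent results are logged as "<message>:||<dependentKey>,<DependResult>"; extract the key and result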
while ((line = br.readLine()) != null) {
if (line.contains(DEPENDENT_SPLIT)) {
String[] tmpStringArray = line.split(":\\|\\|");
if (tmpStringArray.length != 2) {
continue;
}
String dependResultString = tmpStringArray[1];
String[] dependStringArray = dependResultString.split(",");
if (dependStringArray.length != 2) {
continue;
}
String key = dependStringArray[0].trim();
DependResult dependResult = DependResult.valueOf(dependStringArray[1].trim());
resultMap.put(key, dependResult);
}
}
return resultMap;
}
/**
* query sub process instance detail info by task id
*
* @param loginUser login user
* @param projectCode project code
* @param taskId task id
* @return sub process instance detail
*/
@Override
public Map<String, Object> querySubProcessInstanceByTaskId(User loginUser, long projectCode, Integer taskId) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
TaskInstance taskInstance = processService.findTaskInstanceById(taskId);
if (taskInstance == null) {
putMsg(result, Status.TASK_INSTANCE_NOT_EXISTS, taskId);
return result;
}
TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskInstance.getTaskCode());
if (taskDefinition != null && projectCode != taskDefinition.getProjectCode()) {
putMsg(result, Status.TASK_INSTANCE_NOT_EXISTS, taskId);
return result;
}
if (!taskInstance.isSubProcess()) {
putMsg(result, Status.TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE, taskInstance.getName());
return result;
}
ProcessInstance subWorkflowInstance = processService.findSubProcessInstance(
taskInstance.getProcessInstanceId(), taskInstance.getId());
if (subWorkflowInstance == null) {
putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST, taskId);
return result;
}
Map<String, Object> dataMap = new HashMap<>();
dataMap.put(Constants.SUBPROCESS_INSTANCE_ID, subWorkflowInstance.getId());
result.put(DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* update process instance
*
* @param loginUser login user
* @param projectCode project code
* @param taskRelationJson process task relation json
* @param taskDefinitionJson taskDefinitionJson
* @param processInstanceId process instance id
* @param scheduleTime schedule time
* @param syncDefine sync define
* @param globalParams global params
* @param locations locations for nodes
* @param timeout timeout
* @param tenantCode tenantCode
* @return update result code
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> updateProcessInstance(User loginUser, long projectCode, Integer processInstanceId, String taskRelationJson,
String taskDefinitionJson, String scheduleTime, Boolean syncDefine, String globalParams,
String locations, int timeout, String tenantCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
//check process instance exists
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId);
if (processInstance == null) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
}
//check process instance exists in project
ProcessDefinition processDefinition0 = processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode());
if (processDefinition0 != null && projectCode != processDefinition0.getProjectCode()) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
}
//check process instance status
if (!processInstance.getState().typeIsFinished()) {
putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR,
processInstance.getName(), processInstance.getState().toString(), "update");
return result;
}
setProcessInstance(processInstance, tenantCode, scheduleTime, globalParams, timeout);
List<TaskDefinitionLog> taskDefinitionLogs = JSONUtils.toList(taskDefinitionJson, TaskDefinitionLog.class);
if (taskDefinitionLogs.isEmpty()) {
putMsg(result, Status.DATA_IS_NOT_VALID, taskDefinitionJson);
return result;
}
for (TaskDefinitionLog taskDefinitionLog : taskDefinitionLogs) {
if (!CheckUtils.checkTaskDefinitionParameters(taskDefinitionLog)) {
putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinitionLog.getName());
return result;
}
}
int saveTaskResult = processService.saveTaskDefine(loginUser, projectCode, taskDefinitionLogs, syncDefine);
if (saveTaskResult == Constants.DEFINITION_FAILURE) {
putMsg(result, Status.UPDATE_TASK_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR);
}
ProcessDefinition processDefinition = processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode());
List<ProcessTaskRelationLog> taskRelationList = JSONUtils.toList(taskRelationJson, ProcessTaskRelationLog.class);
//check workflow json is valid
result = processDefinitionService.checkProcessNodeList(taskRelationJson);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
int tenantId = -1;
if (!Constants.DEFAULT.equals(tenantCode)) {
Tenant tenant = tenantMapper.queryByTenantCode(tenantCode);
if (tenant == null) {
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
tenantId = tenant.getId();
}
processDefinition.set(projectCode, processDefinition.getName(), processDefinition.getDescription(), globalParams, locations, timeout, tenantId);
processDefinition.setUpdateTime(new Date());
int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, syncDefine, Boolean.FALSE);
if (insertVersion == 0) {
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR);
}
int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(),
processDefinition.getCode(), insertVersion, taskRelationList, taskDefinitionLogs, syncDefine);
if (insertResult == Constants.EXIT_CODE_SUCCESS) {
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefinition);
} else {
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR);
}
processInstance.setProcessDefinitionVersion(insertVersion);
int update = processService.updateProcessInstance(processInstance);
if (update == 0) {
putMsg(result, Status.UPDATE_PROCESS_INSTANCE_ERROR);
throw new ServiceException(Status.UPDATE_PROCESS_INSTANCE_ERROR);
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* update process instance attributes
*/
private void setProcessInstance(ProcessInstance processInstance, String tenantCode, String scheduleTime, String globalParams, int timeout) {
Date schedule = processInstance.getScheduleTime();
if (scheduleTime != null) {
schedule = DateUtils.getScheduleDate(scheduleTime);
}
processInstance.setScheduleTime(schedule);
List<Property> globalParamList = JSONUtils.toList(globalParams, Property.class);
Map<String, String> globalParamMap = globalParamList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue));
globalParams = ParameterUtils.curingGlobalParams(globalParamMap, globalParamList, processInstance.getCmdTypeIfComplement(), schedule);
processInstance.setTimeout(timeout);
processInstance.setTenantCode(tenantCode);
processInstance.setGlobalParams(globalParams);
}
/**
* query parent process instance detail info by sub process instance id
*
* @param loginUser login user
* @param projectCode project code
* @param subId sub process id
* @return parent instance detail
*/
@Override
public Map<String, Object> queryParentInstanceBySubId(User loginUser, long projectCode, Integer subId) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessInstance subInstance = processService.findProcessInstanceDetailById(subId);
if (subInstance == null) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, subId);
return result;
}
if (subInstance.getIsSubProcess() == Flag.NO) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE, subInstance.getName());
return result;
}
ProcessInstance parentWorkflowInstance = processService.findParentProcessInstance(subId);
if (parentWorkflowInstance == null) {
putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST);
return result;
}
Map<String, Object> dataMap = new HashMap<>();
dataMap.put(Constants.PARENT_WORKFLOW_INSTANCE, parentWorkflowInstance.getId());
result.put(DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
     * delete process instance by id; at the same time, delete task instances and their mapping relation data
*
* @param loginUser login user
* @param projectCode project code
* @param processInstanceId process instance id
* @return delete result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> deleteProcessInstanceById(User loginUser, long projectCode, Integer processInstanceId) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId);
if (null == processInstance) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, String.valueOf(processInstanceId));
return result;
}
ProcessDefinition processDefinition = processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode());
if (processDefinition != null && projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, String.valueOf(processInstanceId));
return result;
}
try {
processService.removeTaskLogFile(processInstanceId);
} catch (Exception e) {
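            // best-effort cleanup: ignore failures when removing task log files so the instance deletion can proceed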
}
// delete database cascade
int delete = processService.deleteWorkProcessInstanceById(processInstanceId);
processService.deleteAllSubWorkProcessByParentId(processInstanceId);
processService.deleteWorkProcessMapByParentId(processInstanceId);
processService.deleteWorkTaskInstanceByProcessInstanceId(processInstanceId);
if (delete > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR);
throw new ServiceException(Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR);
}
return result;
}
/**
* view process instance variables
*
* @param projectCode project code
* @param processInstanceId process instance id
* @return variables data
*/
@Override
public Map<String, Object> viewVariables(long projectCode, Integer processInstanceId) {
Map<String, Object> result = new HashMap<>();
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);
if (processInstance == null) {
throw new RuntimeException("workflow instance is null");
}
ProcessDefinition processDefinition = processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode());
if (processDefinition != null && projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
}
Map<String, String> timeParams = BusinessTimeUtils
.getBusinessTime(processInstance.getCmdTypeIfComplement(),
processInstance.getScheduleTime());
String userDefinedParams = processInstance.getGlobalParams();
// global params
List<Property> globalParams = new ArrayList<>();
// global param string
String globalParamStr = ParameterUtils.convertParameterPlaceholders(JSONUtils.toJsonString(globalParams), timeParams);
globalParams = JSONUtils.toList(globalParamStr, Property.class);
for (Property property : globalParams) {
timeParams.put(property.getProp(), property.getValue());
}
if (userDefinedParams != null && userDefinedParams.length() > 0) {
globalParams = JSONUtils.toList(userDefinedParams, Property.class);
}
Map<String, Map<String, Object>> localUserDefParams = getLocalParams(processInstance, timeParams);
Map<String, Object> resultMap = new HashMap<>();
resultMap.put(GLOBAL_PARAMS, globalParams);
resultMap.put(LOCAL_PARAMS, localUserDefParams);
result.put(DATA_LIST, resultMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* get local params
*/
private Map<String, Map<String, Object>> getLocalParams(ProcessInstance processInstance, Map<String, String> timeParams) {
Map<String, Map<String, Object>> localUserDefParams = new HashMap<>();
List<TaskInstance> taskInstanceList = taskInstanceMapper.findValidTaskListByProcessId(processInstance.getId(), Flag.YES);
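        // collect each task's local params (with time placeholders resolved), keyed by task name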
for (TaskInstance taskInstance : taskInstanceList) {
TaskDefinitionLog taskDefinitionLog = taskDefinitionLogMapper.queryByDefinitionCodeAndVersion(
taskInstance.getTaskCode(), taskInstance.getTaskDefinitionVersion());
String localParams = JSONUtils.getNodeString(taskDefinitionLog.getTaskParams(), LOCAL_PARAMS);
if (!StringUtils.isEmpty(localParams)) {
localParams = ParameterUtils.convertParameterPlaceholders(localParams, timeParams);
List<Property> localParamsList = JSONUtils.toList(localParams, Property.class);
Map<String, Object> localParamsMap = new HashMap<>();
localParamsMap.put(TASK_TYPE, taskDefinitionLog.getTaskType());
localParamsMap.put(LOCAL_PARAMS_LIST, localParamsList);
if (CollectionUtils.isNotEmpty(localParamsList)) {
localUserDefParams.put(taskDefinitionLog.getName(), localParamsMap);
}
}
}
return localUserDefParams;
}
/**
     * encapsulate the gantt structure
*
* @param projectCode project code
* @param processInstanceId process instance id
* @return gantt tree data
* @throws Exception exception when json parse
*/
@Override
public Map<String, Object> viewGantt(long projectCode, Integer processInstanceId) throws Exception {
Map<String, Object> result = new HashMap<>();
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);
if (processInstance == null) {
throw new RuntimeException("workflow instance is null");
}
ProcessDefinition processDefinition = processDefinitionLogMapper.queryByDefinitionCodeAndVersion(
processInstance.getProcessDefinitionCode(),
processInstance.getProcessDefinitionVersion()
);
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
}
GanttDto ganttDto = new GanttDto();
DAG<String, TaskNode, TaskNodeRelation> dag = processService.genDagGraph(processDefinition);
//topological sort
List<String> nodeList = dag.topologicalSort();
ganttDto.setTaskNames(nodeList);
List<Task> taskList = new ArrayList<>();
for (String node : nodeList) {
TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndCode(processInstanceId, Long.parseLong(node));
if (taskInstance == null) {
continue;
}
Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime();
Date endTime = taskInstance.getEndTime() == null ? new Date() : taskInstance.getEndTime();
Task task = new Task();
task.setTaskName(taskInstance.getName());
task.getStartDate().add(startTime.getTime());
task.getEndDate().add(endTime.getTime());
task.setIsoStart(startTime);
task.setIsoEnd(endTime);
task.setStatus(taskInstance.getState().toString());
task.setExecutionDate(taskInstance.getStartTime());
task.setDuration(DateUtils.format2Readable(endTime.getTime() - startTime.getTime()));
taskList.add(task);
}
ganttDto.setTasks(taskList);
result.put(DATA_LIST, ganttDto);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query process instance by processDefinitionCode and stateArray
*
* @param processDefinitionCode processDefinitionCode
* @param states states array
* @return process instance list
*/
@Override
public List<ProcessInstance> queryByProcessDefineCodeAndStatus(Long processDefinitionCode, int[] states) {
return processInstanceMapper.queryByProcessDefineCodeAndStatus(processDefinitionCode, states);
}
/**
* query process instance by processDefinitionCode
*
* @param processDefinitionCode processDefinitionCode
* @param size size
* @return process instance list
*/
@Override
public List<ProcessInstance> queryByProcessDefineCode(Long processDefinitionCode, int size) {
return processInstanceMapper.queryByProcessDefineCode(processDefinitionCode, size);
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,056 | [Bug] [Environment Edit Worker Group] Bug when editing worker groups in environment management. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
### The problem exists in dev, 2.0.2, and 2.0.3
- Create a new environment, configure workers, and select multiple groups
![b2582efd4dee1545eabd94af875ee51](https://user-images.githubusercontent.com/19239641/149605118-16ed1371-df46-4712-8fe4-961dfc7abafc.png)
Edit this data and delete two groups.
![27efec46513e5e8e1b52a3aa3402913](https://user-images.githubusercontent.com/19239641/149605122-25d4df15-ac8a-4a97-8d6f-2760823d95fb.png)
The edit interface request succeeded.
![e68d65e7f2c0957af979d2693698a9c](https://user-images.githubusercontent.com/19239641/149605125-a51c732e-eb7a-4f7e-9e5c-568d98350347.png)
The list interface returns no group.
![ea9c89554aa1bc17194125d3cc7a627](https://user-images.githubusercontent.com/19239641/149605130-9fa8f586-29a7-4038-8c05-e8510b2e95ff.png)
![9ec424f4d71a6fc3ca139f872c38f2e](https://user-images.githubusercontent.com/19239641/149605181-1d20ac5d-604d-4d7a-8fad-165a4d7d0815.png)
### What you expected to happen
After the edit request succeeds, the remaining worker group should still be returned by the list interface, but the list interface returns an empty worker group instead.
### How to reproduce
Create a new environment and select multiple worker groups, then edit the environment just created, keeping at least one worker group; the worker group data returned by the list interface is then empty.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8056 | https://github.com/apache/dolphinscheduler/pull/8072 | c0d69125eb1c8ddb2993697533e9b490f2611cc1 | 7391cc20f01bef62b9a30f7b6b7f54fb41bdab03 | "2022-01-15T02:19:47Z" | java | "2022-01-17T02:30:40Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentServiceImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.dto.EnvironmentDto;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.EnvironmentService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils;
import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils.CodeGenerateException;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.entity.Environment;
import org.apache.dolphinscheduler.dao.entity.EnvironmentWorkerGroupRelation;
import org.apache.dolphinscheduler.dao.entity.TaskDefinition;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.EnvironmentMapper;
import org.apache.dolphinscheduler.dao.mapper.EnvironmentWorkerGroupRelationMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections4.SetUtils;
import org.apache.commons.lang.StringUtils;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.fasterxml.jackson.core.type.TypeReference;
/**
 * environment service impl
*/
@Service
public class EnvironmentServiceImpl extends BaseServiceImpl implements EnvironmentService {
private static final Logger logger = LoggerFactory.getLogger(EnvironmentServiceImpl.class);
@Autowired
private EnvironmentMapper environmentMapper;
@Autowired
private EnvironmentWorkerGroupRelationMapper relationMapper;
@Autowired
private TaskDefinitionMapper taskDefinitionMapper;
/**
* create environment
*
* @param loginUser login user
* @param name environment name
* @param config environment config
* @param desc environment desc
* @param workerGroups worker groups
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> createEnvironment(User loginUser, String name, String config, String desc, String workerGroups) {
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
Map<String, Object> checkResult = checkParams(name,config,workerGroups);
if (checkResult.get(Constants.STATUS) != Status.SUCCESS) {
return checkResult;
}
Environment environment = environmentMapper.queryByEnvironmentName(name);
if (environment != null) {
putMsg(result, Status.ENVIRONMENT_NAME_EXISTS, name);
return result;
}
Environment env = new Environment();
env.setName(name);
env.setConfig(config);
env.setDescription(desc);
env.setOperator(loginUser.getId());
env.setCreateTime(new Date());
env.setUpdateTime(new Date());
long code = 0L;
try {
code = CodeGenerateUtils.getInstance().genCode();
env.setCode(code);
} catch (CodeGenerateException e) {
logger.error("Environment code get error, ", e);
}
if (code == 0L) {
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS, "Error generating environment code");
return result;
}
if (environmentMapper.insert(env) > 0) {
if (!StringUtils.isEmpty(workerGroups)) {
List<String> workerGroupList = JSONUtils.parseObject(workerGroups, new TypeReference<List<String>>(){});
if (CollectionUtils.isNotEmpty(workerGroupList)) {
workerGroupList.stream().forEach(workerGroup -> {
if (!StringUtils.isEmpty(workerGroup)) {
EnvironmentWorkerGroupRelation relation = new EnvironmentWorkerGroupRelation();
relation.setEnvironmentCode(env.getCode());
relation.setWorkerGroup(workerGroup);
relation.setOperator(loginUser.getId());
relation.setCreateTime(new Date());
relation.setUpdateTime(new Date());
relationMapper.insert(relation);
}
});
}
}
result.put(Constants.DATA_LIST, env.getCode());
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.CREATE_ENVIRONMENT_ERROR);
}
return result;
}
/**
* query environment paging
*
* @param pageNo page number
* @param searchVal search value
* @param pageSize page size
* @return environment list page
*/
@Override
public Result queryEnvironmentListPaging(Integer pageNo, Integer pageSize, String searchVal) {
Result result = new Result();
Page<Environment> page = new Page<>(pageNo, pageSize);
IPage<Environment> environmentIPage = environmentMapper.queryEnvironmentListPaging(page, searchVal);
PageInfo<EnvironmentDto> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotal((int) environmentIPage.getTotal());
if (CollectionUtils.isNotEmpty(environmentIPage.getRecords())) {
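            // build envCode -> [workerGroup, ...] once from all relations so each
            // page record below can look up its worker groups without extra queries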
Map<Long, List<String>> relationMap = relationMapper.selectList(null).stream()
.collect(Collectors.groupingBy(EnvironmentWorkerGroupRelation::getEnvironmentCode,Collectors.mapping(EnvironmentWorkerGroupRelation::getWorkerGroup,Collectors.toList())));
List<EnvironmentDto> dtoList = environmentIPage.getRecords().stream().map(environment -> {
EnvironmentDto dto = new EnvironmentDto();
BeanUtils.copyProperties(environment,dto);
List<String> workerGroups = relationMap.getOrDefault(environment.getCode(),new ArrayList<String>());
dto.setWorkerGroups(workerGroups);
return dto;
}).collect(Collectors.toList());
pageInfo.setTotalList(dtoList);
} else {
pageInfo.setTotalList(new ArrayList<>());
}
result.setData(pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query all environment
*
* @return all environment list
*/
@Override
public Map<String, Object> queryAllEnvironmentList() {
Map<String,Object> result = new HashMap<>();
List<Environment> environmentList = environmentMapper.queryAllEnvironmentList();
if (CollectionUtils.isNotEmpty(environmentList)) {
Map<Long, List<String>> relationMap = relationMapper.selectList(null).stream()
.collect(Collectors.groupingBy(EnvironmentWorkerGroupRelation::getEnvironmentCode,Collectors.mapping(EnvironmentWorkerGroupRelation::getWorkerGroup,Collectors.toList())));
List<EnvironmentDto> dtoList = environmentList.stream().map(environment -> {
EnvironmentDto dto = new EnvironmentDto();
BeanUtils.copyProperties(environment,dto);
List<String> workerGroups = relationMap.getOrDefault(environment.getCode(),new ArrayList<String>());
dto.setWorkerGroups(workerGroups);
return dto;
}).collect(Collectors.toList());
result.put(Constants.DATA_LIST,dtoList);
} else {
result.put(Constants.DATA_LIST, new ArrayList<>());
}
putMsg(result,Status.SUCCESS);
return result;
}
/**
* query environment
*
* @param code environment code
*/
@Override
public Map<String, Object> queryEnvironmentByCode(Long code) {
Map<String, Object> result = new HashMap<>();
Environment env = environmentMapper.queryByEnvironmentCode(code);
if (env == null) {
putMsg(result, Status.QUERY_ENVIRONMENT_BY_CODE_ERROR, code);
} else {
List<String> workerGroups = relationMapper.queryByEnvironmentCode(env.getCode()).stream()
.map(item -> item.getWorkerGroup())
.collect(Collectors.toList());
EnvironmentDto dto = new EnvironmentDto();
BeanUtils.copyProperties(env,dto);
dto.setWorkerGroups(workerGroups);
result.put(Constants.DATA_LIST, dto);
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* query environment
*
* @param name environment name
*/
@Override
public Map<String, Object> queryEnvironmentByName(String name) {
Map<String, Object> result = new HashMap<>();
Environment env = environmentMapper.queryByEnvironmentName(name);
if (env == null) {
putMsg(result, Status.QUERY_ENVIRONMENT_BY_NAME_ERROR, name);
} else {
List<String> workerGroups = relationMapper.queryByEnvironmentCode(env.getCode()).stream()
.map(item -> item.getWorkerGroup())
.collect(Collectors.toList());
EnvironmentDto dto = new EnvironmentDto();
BeanUtils.copyProperties(env,dto);
dto.setWorkerGroups(workerGroups);
result.put(Constants.DATA_LIST, dto);
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* delete environment
*
* @param loginUser login user
* @param code environment code
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> deleteEnvironmentByCode(User loginUser, Long code) {
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
Integer relatedTaskNumber = taskDefinitionMapper
.selectCount(new QueryWrapper<TaskDefinition>().lambda().eq(TaskDefinition::getEnvironmentCode,code));
if (relatedTaskNumber > 0) {
putMsg(result, Status.DELETE_ENVIRONMENT_RELATED_TASK_EXISTS);
return result;
}
int delete = environmentMapper.deleteByCode(code);
if (delete > 0) {
relationMapper.delete(new QueryWrapper<EnvironmentWorkerGroupRelation>()
.lambda()
.eq(EnvironmentWorkerGroupRelation::getEnvironmentCode,code));
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.DELETE_ENVIRONMENT_ERROR);
}
return result;
}
/**
* update environment
*
* @param loginUser login user
* @param code environment code
* @param name environment name
* @param config environment config
* @param desc environment desc
* @param workerGroups worker groups
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> updateEnvironmentByCode(User loginUser, Long code, String name, String config, String desc, String workerGroups) {
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
Map<String, Object> checkResult = checkParams(name,config,workerGroups);
if (checkResult.get(Constants.STATUS) != Status.SUCCESS) {
return checkResult;
}
Environment environment = environmentMapper.queryByEnvironmentName(name);
if (environment != null && !environment.getCode().equals(code)) {
putMsg(result, Status.ENVIRONMENT_NAME_EXISTS, name);
return result;
}
Set<String> workerGroupSet;
if (!StringUtils.isEmpty(workerGroups)) {
workerGroupSet = JSONUtils.parseObject(workerGroups, new TypeReference<Set<String>>() {});
} else {
workerGroupSet = new TreeSet<>();
}
Set<String> existWorkerGroupSet = relationMapper
.queryByEnvironmentCode(code)
.stream()
.map(item -> item.getWorkerGroup())
.collect(Collectors.toSet());
Set<String> deleteWorkerGroupSet = SetUtils.difference(existWorkerGroupSet,workerGroupSet).toSet();
Set<String> addWorkerGroupSet = SetUtils.difference(workerGroupSet,existWorkerGroupSet).toSet();
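        // e.g. existing = {A, B, C}, submitted = {A}:
        //   deleteWorkerGroupSet = {B, C}  -> relations to remove
        //   addWorkerGroupSet    = {}      -> relations to insert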
// verify whether the relation of this environment and worker groups can be adjusted
checkResult = checkUsedEnvironmentWorkerGroupRelation(deleteWorkerGroupSet, name, code);
if (checkResult.get(Constants.STATUS) != Status.SUCCESS) {
return checkResult;
}
Environment env = new Environment();
env.setCode(code);
env.setName(name);
env.setConfig(config);
env.setDescription(desc);
env.setOperator(loginUser.getId());
env.setUpdateTime(new Date());
int update = environmentMapper.update(env, new UpdateWrapper<Environment>().lambda().eq(Environment::getCode,code));
if (update > 0) {
            deleteWorkerGroupSet.stream().forEach(key -> {
                if (!StringUtils.isEmpty(key)) {
                    relationMapper.delete(new QueryWrapper<EnvironmentWorkerGroupRelation>()
                            .lambda()
                            .eq(EnvironmentWorkerGroupRelation::getEnvironmentCode, code)
                            // must also match the worker group; filtering by environment
                            // code alone deletes every relation of this environment,
                            // including the groups the user chose to keep
                            .eq(EnvironmentWorkerGroupRelation::getWorkerGroup, key));
                }
            });
addWorkerGroupSet.stream().forEach(key -> {
if (!StringUtils.isEmpty(key)) {
EnvironmentWorkerGroupRelation relation = new EnvironmentWorkerGroupRelation();
relation.setEnvironmentCode(code);
relation.setWorkerGroup(key);
relation.setUpdateTime(new Date());
relation.setCreateTime(new Date());
relation.setOperator(loginUser.getId());
relationMapper.insert(relation);
}
});
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.UPDATE_ENVIRONMENT_ERROR, name);
}
return result;
}
/**
* verify environment name
*
* @param environmentName environment name
* @return true if the environment name not exists, otherwise return false
*/
@Override
public Map<String, Object> verifyEnvironment(String environmentName) {
Map<String, Object> result = new HashMap<>();
if (StringUtils.isEmpty(environmentName)) {
putMsg(result, Status.ENVIRONMENT_NAME_IS_NULL);
return result;
}
Environment environment = environmentMapper.queryByEnvironmentName(environmentName);
if (environment != null) {
putMsg(result, Status.ENVIRONMENT_NAME_EXISTS, environmentName);
return result;
}
result.put(Constants.STATUS, Status.SUCCESS);
return result;
}
private Map<String, Object> checkUsedEnvironmentWorkerGroupRelation(Set<String> deleteKeySet,String environmentName, Long environmentCode) {
Map<String, Object> result = new HashMap<>();
for (String workerGroup : deleteKeySet) {
TaskDefinition taskDefinition = taskDefinitionMapper
.selectOne(new QueryWrapper<TaskDefinition>().lambda()
.eq(TaskDefinition::getEnvironmentCode,environmentCode)
.eq(TaskDefinition::getWorkerGroup,workerGroup));
if (Objects.nonNull(taskDefinition)) {
putMsg(result, Status.UPDATE_ENVIRONMENT_WORKER_GROUP_RELATION_ERROR,workerGroup,environmentName,taskDefinition.getName());
return result;
}
}
result.put(Constants.STATUS, Status.SUCCESS);
return result;
}
public Map<String, Object> checkParams(String name, String config, String workerGroups) {
Map<String, Object> result = new HashMap<>();
if (StringUtils.isEmpty(name)) {
putMsg(result, Status.ENVIRONMENT_NAME_IS_NULL);
return result;
}
if (StringUtils.isEmpty(config)) {
putMsg(result, Status.ENVIRONMENT_CONFIG_IS_NULL);
return result;
}
if (!StringUtils.isEmpty(workerGroups)) {
List<String> workerGroupList = JSONUtils.parseObject(workerGroups, new TypeReference<List<String>>(){});
if (Objects.isNull(workerGroupList)) {
putMsg(result, Status.ENVIRONMENT_WORKER_GROUPS_IS_INVALID);
return result;
}
}
result.put(Constants.STATUS, Status.SUCCESS);
return result;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,785 | [Feature][UI Next] Resource. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
- [x] File Manage
- [x] Resource Manage
- [ ] Function Manage
- [x] Task Group Option
- [x] Task Group Queue
### Use case
_No response_
### Related issues
[#7332](https://github.com/apache/dolphinscheduler/issues/7332)
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7785 | https://github.com/apache/dolphinscheduler/pull/8083 | 3456904af8601556cecff5a07b260d092ca13bcb | 8c188e809b2da397948ce6b576630b6a19f23b64 | "2022-01-04T07:22:20Z" | java | "2022-01-17T07:04:55Z" | dolphinscheduler-ui-next/pnpm-lock.yaml | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
lockfileVersion: 5.3
specifiers:
'@types/node': ^16.11.19
'@types/nprogress': ^0.2.0
'@types/qs': ^6.9.7
'@typescript-eslint/eslint-plugin': ^5.9.0
'@typescript-eslint/parser': ^5.9.0
'@vicons/antd': ^0.11.0
'@vitejs/plugin-vue': ^1.10.2
'@vitejs/plugin-vue-jsx': ^1.3.3
'@vueuse/core': ^7.5.3
axios: ^0.24.0
dart-sass: ^1.48.0
date-fns: ^2.27.0
echarts: ^5.2.2
eslint: ^8.6.0
eslint-config-prettier: ^8.3.0
eslint-plugin-prettier: ^4.0.0
eslint-plugin-vue: ^8.2.0
lodash: ^4.17.21
monaco-editor: ^0.31.1
naive-ui: 2.23.2
nprogress: ^0.2.0
pinia: ^2.0.9
pinia-plugin-persistedstate: ^1.0.3
prettier: ^2.5.1
qs: ^6.10.2
sass: ^1.47.0
sass-loader: ^12.4.0
typescript: ^4.5.4
typescript-plugin-css-modules: ^3.4.0
vfonts: ^0.1.0
vite: ^2.7.10
vite-plugin-compression: ^0.3.6
vue: ^3.2.26
vue-i18n: ^9.2.0-beta.26
vue-router: ^4.0.12
vue-tsc: ^0.28.10
dependencies:
'@vueuse/core': 7.5.3_vue@3.2.26
axios: 0.24.0
date-fns: 2.28.0
echarts: 5.2.2
lodash: 4.17.21
monaco-editor: 0.31.1
naive-ui: 2.23.2_vue@3.2.26
nprogress: 0.2.0
pinia: 2.0.9_typescript@4.5.4+vue@3.2.26
pinia-plugin-persistedstate: 1.0.3_pinia@2.0.9
qs: 6.10.2
vfonts: 0.1.0
vue: 3.2.26
vue-i18n: 9.2.0-beta.26_vue@3.2.26
vue-router: 4.0.12_vue@3.2.26
devDependencies:
'@types/node': 16.11.19
'@types/nprogress': 0.2.0
'@types/qs': 6.9.7
'@typescript-eslint/eslint-plugin': 5.9.0_bd2fd93dbcc607ad2f21b784bccfe0c8
'@typescript-eslint/parser': 5.9.0_eslint@8.6.0+typescript@4.5.4
'@vicons/antd': 0.11.0
'@vitejs/plugin-vue': 1.10.2_vite@2.7.10
'@vitejs/plugin-vue-jsx': 1.3.3
dart-sass: 1.25.0
eslint: 8.6.0
eslint-config-prettier: 8.3.0_eslint@8.6.0
eslint-plugin-prettier: 4.0.0_1c588f61426b1faf18812943f1678311
eslint-plugin-vue: 8.2.0_eslint@8.6.0
prettier: 2.5.1
sass: 1.47.0
sass-loader: 12.4.0_sass@1.47.0
typescript: 4.5.4
typescript-plugin-css-modules: 3.4.0_typescript@4.5.4
vite: 2.7.10_sass@1.47.0
vite-plugin-compression: 0.3.6_vite@2.7.10
vue-tsc: 0.28.10_typescript@4.5.4
packages:
/@babel/code-frame/7.16.7:
resolution: {integrity: sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/highlight': 7.16.7
dev: true
/@babel/compat-data/7.16.4:
resolution: {integrity: sha512-1o/jo7D+kC9ZjHX5v+EHrdjl3PhxMrLSOTGsOdHJ+KL8HCaEK6ehrVL2RS6oHDZp+L7xLirLrPmQtEng769J/Q==}
engines: {node: '>=6.9.0'}
dev: true
/@babel/core/7.16.7:
resolution: {integrity: sha512-aeLaqcqThRNZYmbMqtulsetOQZ/5gbR/dWruUCJcpas4Qoyy+QeagfDsPdMrqwsPRDNxJvBlRiZxxX7THO7qtA==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/code-frame': 7.16.7
'@babel/generator': 7.16.7
'@babel/helper-compilation-targets': 7.16.7_@babel+core@7.16.7
'@babel/helper-module-transforms': 7.16.7
'@babel/helpers': 7.16.7
'@babel/parser': 7.16.7
'@babel/template': 7.16.7
'@babel/traverse': 7.16.7
'@babel/types': 7.16.7
convert-source-map: 1.8.0
debug: 4.3.3
gensync: 1.0.0-beta.2
json5: 2.2.0
semver: 6.3.0
source-map: 0.5.7
transitivePeerDependencies:
- supports-color
dev: true
/@babel/generator/7.16.7:
resolution: {integrity: sha512-/ST3Sg8MLGY5HVYmrjOgL60ENux/HfO/CsUh7y4MalThufhE/Ff/6EibFDHi4jiDCaWfJKoqbE6oTh21c5hrRg==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/types': 7.16.7
jsesc: 2.5.2
source-map: 0.5.7
dev: true
/@babel/helper-annotate-as-pure/7.16.7:
resolution: {integrity: sha512-s6t2w/IPQVTAET1HitoowRGXooX8mCgtuP5195wD/QJPV6wYjpujCGF7JuMODVX2ZAJOf1GT6DT9MHEZvLOFSw==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/types': 7.16.7
dev: true
/@babel/helper-compilation-targets/7.16.7_@babel+core@7.16.7:
resolution: {integrity: sha512-mGojBwIWcwGD6rfqgRXVlVYmPAv7eOpIemUG3dGnDdCY4Pae70ROij3XmfrH6Fa1h1aiDylpglbZyktfzyo/hA==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0
dependencies:
'@babel/compat-data': 7.16.4
'@babel/core': 7.16.7
'@babel/helper-validator-option': 7.16.7
browserslist: 4.19.1
semver: 6.3.0
dev: true
/@babel/helper-create-class-features-plugin/7.16.7_@babel+core@7.16.7:
resolution: {integrity: sha512-kIFozAvVfK05DM4EVQYKK+zteWvY85BFdGBRQBytRyY3y+6PX0DkDOn/CZ3lEuczCfrCxEzwt0YtP/87YPTWSw==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0
dependencies:
'@babel/core': 7.16.7
'@babel/helper-annotate-as-pure': 7.16.7
'@babel/helper-environment-visitor': 7.16.7
'@babel/helper-function-name': 7.16.7
'@babel/helper-member-expression-to-functions': 7.16.7
'@babel/helper-optimise-call-expression': 7.16.7
'@babel/helper-replace-supers': 7.16.7
'@babel/helper-split-export-declaration': 7.16.7
transitivePeerDependencies:
- supports-color
dev: true
/@babel/helper-environment-visitor/7.16.7:
resolution: {integrity: sha512-SLLb0AAn6PkUeAfKJCCOl9e1R53pQlGAfc4y4XuMRZfqeMYLE0dM1LMhqbGAlGQY0lfw5/ohoYWAe9V1yibRag==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/types': 7.16.7
dev: true
/@babel/helper-function-name/7.16.7:
resolution: {integrity: sha512-QfDfEnIUyyBSR3HtrtGECuZ6DAyCkYFp7GHl75vFtTnn6pjKeK0T1DB5lLkFvBea8MdaiUABx3osbgLyInoejA==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/helper-get-function-arity': 7.16.7
'@babel/template': 7.16.7
'@babel/types': 7.16.7
dev: true
/@babel/helper-get-function-arity/7.16.7:
resolution: {integrity: sha512-flc+RLSOBXzNzVhcLu6ujeHUrD6tANAOU5ojrRx/as+tbzf8+stUCj7+IfRRoAbEZqj/ahXEMsjhOhgeZsrnTw==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/types': 7.16.7
dev: true
/@babel/helper-hoist-variables/7.16.7:
resolution: {integrity: sha512-m04d/0Op34H5v7pbZw6pSKP7weA6lsMvfiIAMeIvkY/R4xQtBSMFEigu9QTZ2qB/9l22vsxtM8a+Q8CzD255fg==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/types': 7.16.7
dev: true
/@babel/helper-member-expression-to-functions/7.16.7:
resolution: {integrity: sha512-VtJ/65tYiU/6AbMTDwyoXGPKHgTsfRarivm+YbB5uAzKUyuPjgZSgAFeG87FCigc7KNHu2Pegh1XIT3lXjvz3Q==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/types': 7.16.7
dev: true
/@babel/helper-module-imports/7.16.7:
resolution: {integrity: sha512-LVtS6TqjJHFc+nYeITRo6VLXve70xmq7wPhWTqDJusJEgGmkAACWwMiTNrvfoQo6hEhFwAIixNkvB0jPXDL8Wg==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/types': 7.16.7
dev: true
/@babel/helper-module-transforms/7.16.7:
resolution: {integrity: sha512-gaqtLDxJEFCeQbYp9aLAefjhkKdjKcdh6DB7jniIGU3Pz52WAmP268zK0VgPz9hUNkMSYeH976K2/Y6yPadpng==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/helper-environment-visitor': 7.16.7
'@babel/helper-module-imports': 7.16.7
'@babel/helper-simple-access': 7.16.7
'@babel/helper-split-export-declaration': 7.16.7
'@babel/helper-validator-identifier': 7.16.7
'@babel/template': 7.16.7
'@babel/traverse': 7.16.7
'@babel/types': 7.16.7
transitivePeerDependencies:
- supports-color
dev: true
/@babel/helper-optimise-call-expression/7.16.7:
resolution: {integrity: sha512-EtgBhg7rd/JcnpZFXpBy0ze1YRfdm7BnBX4uKMBd3ixa3RGAE002JZB66FJyNH7g0F38U05pXmA5P8cBh7z+1w==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/types': 7.16.7
dev: true
/@babel/helper-plugin-utils/7.16.7:
resolution: {integrity: sha512-Qg3Nk7ZxpgMrsox6HreY1ZNKdBq7K72tDSliA6dCl5f007jR4ne8iD5UzuNnCJH2xBf2BEEVGr+/OL6Gdp7RxA==}
engines: {node: '>=6.9.0'}
dev: true
/@babel/helper-replace-supers/7.16.7:
resolution: {integrity: sha512-y9vsWilTNaVnVh6xiJfABzsNpgDPKev9HnAgz6Gb1p6UUwf9NepdlsV7VXGCftJM+jqD5f7JIEubcpLjZj5dBw==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/helper-environment-visitor': 7.16.7
'@babel/helper-member-expression-to-functions': 7.16.7
'@babel/helper-optimise-call-expression': 7.16.7
'@babel/traverse': 7.16.7
'@babel/types': 7.16.7
transitivePeerDependencies:
- supports-color
dev: true
/@babel/helper-simple-access/7.16.7:
resolution: {integrity: sha512-ZIzHVyoeLMvXMN/vok/a4LWRy8G2v205mNP0XOuf9XRLyX5/u9CnVulUtDgUTama3lT+bf/UqucuZjqiGuTS1g==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/types': 7.16.7
dev: true
/@babel/helper-split-export-declaration/7.16.7:
resolution: {integrity: sha512-xbWoy/PFoxSWazIToT9Sif+jJTlrMcndIsaOKvTA6u7QEo7ilkRZpjew18/W3c7nm8fXdUDXh02VXTbZ0pGDNw==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/types': 7.16.7
dev: true
/@babel/helper-validator-identifier/7.16.7:
resolution: {integrity: sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw==}
engines: {node: '>=6.9.0'}
dev: true
/@babel/helper-validator-option/7.16.7:
resolution: {integrity: sha512-TRtenOuRUVo9oIQGPC5G9DgK4743cdxvtOw0weQNpZXaS16SCBi5MNjZF8vba3ETURjZpTbVn7Vvcf2eAwFozQ==}
engines: {node: '>=6.9.0'}
dev: true
/@babel/helpers/7.16.7:
resolution: {integrity: sha512-9ZDoqtfY7AuEOt3cxchfii6C7GDyyMBffktR5B2jvWv8u2+efwvpnVKXMWzNehqy68tKgAfSwfdw/lWpthS2bw==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/template': 7.16.7
'@babel/traverse': 7.16.7
'@babel/types': 7.16.7
transitivePeerDependencies:
- supports-color
dev: true
/@babel/highlight/7.16.7:
resolution: {integrity: sha512-aKpPMfLvGO3Q97V0qhw/V2SWNWlwfJknuwAunU7wZLSfrM4xTBvg7E5opUVi1kJTBKihE38CPg4nBiqX83PWYw==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/helper-validator-identifier': 7.16.7
chalk: 2.4.2
js-tokens: 4.0.0
dev: true
/@babel/parser/7.16.7:
resolution: {integrity: sha512-sR4eaSrnM7BV7QPzGfEX5paG/6wrZM3I0HDzfIAK06ESvo9oy3xBuVBxE3MbQaKNhvg8g/ixjMWo2CGpzpHsDA==}
engines: {node: '>=6.0.0'}
hasBin: true
/@babel/plugin-syntax-import-meta/7.10.4_@babel+core@7.16.7:
resolution: {integrity: sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==}
peerDependencies:
'@babel/core': ^7.0.0-0
dependencies:
'@babel/core': 7.16.7
'@babel/helper-plugin-utils': 7.16.7
dev: true
/@babel/plugin-syntax-jsx/7.16.7_@babel+core@7.16.7:
resolution: {integrity: sha512-Esxmk7YjA8QysKeT3VhTXvF6y77f/a91SIs4pWb4H2eWGQkCKFgQaG6hdoEVZtGsrAcb2K5BW66XsOErD4WU3Q==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
dependencies:
'@babel/core': 7.16.7
'@babel/helper-plugin-utils': 7.16.7
dev: true
/@babel/plugin-syntax-typescript/7.16.7_@babel+core@7.16.7:
resolution: {integrity: sha512-YhUIJHHGkqPgEcMYkPCKTyGUdoGKWtopIycQyjJH8OjvRgOYsXsaKehLVPScKJWAULPxMa4N1vCe6szREFlZ7A==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
dependencies:
'@babel/core': 7.16.7
'@babel/helper-plugin-utils': 7.16.7
dev: true
/@babel/plugin-transform-typescript/7.16.7_@babel+core@7.16.7:
resolution: {integrity: sha512-Hzx1lvBtOCWuCEwMmYOfpQpO7joFeXLgoPuzZZBtTxXqSqUGUubvFGZv2ygo1tB5Bp9q6PXV3H0E/kf7KM0RLA==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
dependencies:
'@babel/core': 7.16.7
'@babel/helper-create-class-features-plugin': 7.16.7_@babel+core@7.16.7
'@babel/helper-plugin-utils': 7.16.7
'@babel/plugin-syntax-typescript': 7.16.7_@babel+core@7.16.7
transitivePeerDependencies:
- supports-color
dev: true
/@babel/template/7.16.7:
resolution: {integrity: sha512-I8j/x8kHUrbYRTUxXrrMbfCa7jxkE7tZre39x3kjr9hvI82cK1FfqLygotcWN5kdPGWcLdWMHpSBavse5tWw3w==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/code-frame': 7.16.7
'@babel/parser': 7.16.7
'@babel/types': 7.16.7
dev: true
/@babel/traverse/7.16.7:
resolution: {integrity: sha512-8KWJPIb8c2VvY8AJrydh6+fVRo2ODx1wYBU2398xJVq0JomuLBZmVQzLPBblJgHIGYG4znCpUZUZ0Pt2vdmVYQ==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/code-frame': 7.16.7
'@babel/generator': 7.16.7
'@babel/helper-environment-visitor': 7.16.7
'@babel/helper-function-name': 7.16.7
'@babel/helper-hoist-variables': 7.16.7
'@babel/helper-split-export-declaration': 7.16.7
'@babel/parser': 7.16.7
'@babel/types': 7.16.7
debug: 4.3.3
globals: 11.12.0
transitivePeerDependencies:
- supports-color
dev: true
/@babel/types/7.16.7:
resolution: {integrity: sha512-E8HuV7FO9qLpx6OtoGfUQ2cjIYnbFwvZWYBS+87EwtdMvmUPJSwykpovFB+8insbpF0uJcpr8KMUi64XZntZcg==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/helper-validator-identifier': 7.16.7
to-fast-properties: 2.0.0
dev: true
/@css-render/plugin-bem/0.15.8_css-render@0.15.8:
resolution: {integrity: sha512-rJMFBaqZGrezs1d0vPB7hZTSkfFEgcdWxPMtLUmm9PAOioWtw+Knb9A0xU0bYazPKDo0SoNVVcd1Qd1LXD9x4Q==}
peerDependencies:
css-render: ~0.15.8
dependencies:
css-render: 0.15.8
dev: false
/@css-render/vue3-ssr/0.15.8_vue@3.2.26:
resolution: {integrity: sha512-zPFyOH47hEcyRgoIjNEwzYqZEq/LEXPafDXY/9EMpvgCmsPhW1bpC3w8YwAISaP519QesJ/8aNDL/xWKpwXBvQ==}
peerDependencies:
vue: ^3.0.11
dependencies:
vue: 3.2.26
dev: false
/@emmetio/abbreviation/2.2.2:
resolution: {integrity: sha512-TtE/dBnkTCct8+LntkqVrwqQao6EnPAs1YN3cUgxOxTaBlesBCY37ROUAVZrRlG64GNnVShdl/b70RfAI3w5lw==}
dependencies:
'@emmetio/scanner': 1.0.0
dev: true
/@emmetio/css-abbreviation/2.1.4:
resolution: {integrity: sha512-qk9L60Y+uRtM5CPbB0y+QNl/1XKE09mSO+AhhSauIfr2YOx/ta3NJw2d8RtCFxgzHeRqFRr8jgyzThbu+MZ4Uw==}
dependencies:
'@emmetio/scanner': 1.0.0
dev: true
/@emmetio/scanner/1.0.0:
resolution: {integrity: sha512-8HqW8EVqjnCmWXVpqAOZf+EGESdkR27odcMMMGefgKXtar00SoYNSryGv//TELI4T3QFsECo78p+0lmalk/CFA==}
dev: true
/@emotion/hash/0.8.0:
resolution: {integrity: sha512-kBJtf7PH6aWwZ6fka3zQ0p6SBYzx4fl1LoZXE2RrnYST9Xljm7WfKJrU4g/Xr3Beg72MLrp1AWNUmuYJTL7Cow==}
dev: false
/@eslint/eslintrc/1.0.5:
resolution: {integrity: sha512-BLxsnmK3KyPunz5wmCCpqy0YelEoxxGmH73Is+Z74oOTMtExcjkr3dDR6quwrjh1YspA8DH9gnX1o069KiS9AQ==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
dependencies:
ajv: 6.12.6
debug: 4.3.3
espree: 9.3.0
globals: 13.12.0
ignore: 4.0.6
import-fresh: 3.3.0
js-yaml: 4.1.0
minimatch: 3.0.4
strip-json-comments: 3.1.1
transitivePeerDependencies:
- supports-color
dev: true
/@humanwhocodes/config-array/0.9.2:
resolution: {integrity: sha512-UXOuFCGcwciWckOpmfKDq/GyhlTf9pN/BzG//x8p8zTOFEcGuA68ANXheFS0AGvy3qgZqLBUkMs7hqzqCKOVwA==}
engines: {node: '>=10.10.0'}
dependencies:
'@humanwhocodes/object-schema': 1.2.1
debug: 4.3.3
minimatch: 3.0.4
transitivePeerDependencies:
- supports-color
dev: true
/@humanwhocodes/object-schema/1.2.1:
resolution: {integrity: sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==}
dev: true
/@intlify/core-base/9.2.0-beta.26:
resolution: {integrity: sha512-4OXSF0zC70UElev6gam4FdWfC7u4l2ouNK9Hc6R44QlpEXkzhbwwbpZn9gIGB2l11zQnJkAwaovfYyiNJ/JvkA==}
engines: {node: '>= 12'}
dependencies:
'@intlify/devtools-if': 9.2.0-beta.26
'@intlify/message-compiler': 9.2.0-beta.26
'@intlify/shared': 9.2.0-beta.26
'@intlify/vue-devtools': 9.2.0-beta.26
dev: false
/@intlify/devtools-if/9.2.0-beta.26:
resolution: {integrity: sha512-7GiF7v2jlHf7g5hoLF6sKlrX5Di2INfHN4PEAeVr2ashgoSaq1EYIRouBVAnJkOrDOJzCOEY7y53me7+gIbeTw==}
engines: {node: '>= 12'}
dependencies:
'@intlify/shared': 9.2.0-beta.26
dev: false
/@intlify/message-compiler/9.2.0-beta.26:
resolution: {integrity: sha512-qtDgHCMqrXNTekKXGzm0Dm6r3+/X7/jFXP+E07hx+PJbPMv7DzK1iU8h5LlAMQ1/jr2UIRBgXvR5wh35OKoGrA==}
engines: {node: '>= 12'}
dependencies:
'@intlify/shared': 9.2.0-beta.26
source-map: 0.6.1
dev: false
/@intlify/shared/9.2.0-beta.26:
resolution: {integrity: sha512-MjUlkjNThqkqy8yXUcFKBiW/hIfqAn5cP3Vd0b4wdOHS8rPCEbvSbAnF08uiZDkVv8gTcsLyymX21GaU6oYyyQ==}
engines: {node: '>= 12'}
dev: false
/@intlify/vue-devtools/9.2.0-beta.26:
resolution: {integrity: sha512-Ghe2xTSezRzL2fBdIbZEus5+5spE2RMxgjBg6GyvbsDy1M05o2OvukBninJ4/Tc2I5A30lYgAsF1gQlOyfmv1g==}
engines: {node: '>= 12'}
dependencies:
'@intlify/core-base': 9.2.0-beta.26
'@intlify/shared': 9.2.0-beta.26
dev: false
/@nodelib/fs.scandir/2.1.5:
resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==}
engines: {node: '>= 8'}
dependencies:
'@nodelib/fs.stat': 2.0.5
run-parallel: 1.2.0
dev: true
/@nodelib/fs.stat/2.0.5:
resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==}
engines: {node: '>= 8'}
dev: true
/@nodelib/fs.walk/1.2.8:
resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==}
engines: {node: '>= 8'}
dependencies:
'@nodelib/fs.scandir': 2.1.5
fastq: 1.13.0
dev: true
/@rollup/pluginutils/4.1.2:
resolution: {integrity: sha512-ROn4qvkxP9SyPeHaf7uQC/GPFY6L/OWy9+bd9AwcjOAWQwxRscoEyAUD8qCY5o5iL4jqQwoLk2kaTKJPb/HwzQ==}
engines: {node: '>= 8.0.0'}
dependencies:
estree-walker: 2.0.2
picomatch: 2.3.1
dev: true
/@types/jest/27.4.0:
resolution: {integrity: sha512-gHl8XuC1RZ8H2j5sHv/JqsaxXkDDM9iDOgu0Wp8sjs4u/snb2PVehyWXJPr+ORA0RPpgw231mnutWI1+0hgjIQ==}
dependencies:
jest-diff: 27.4.6
pretty-format: 27.4.6
dev: false
/@types/json-schema/7.0.9:
resolution: {integrity: sha512-qcUXuemtEu+E5wZSJHNxUXeCZhAfXKQ41D+duX+VYPde7xyEVZci+/oXKJL13tnRs9lR2pr4fod59GT6/X1/yQ==}
dev: true
/@types/json5/0.0.29:
resolution: {integrity: sha1-7ihweulOEdK4J7y+UnC86n8+ce4=}
dev: true
/@types/lodash-es/4.17.5:
resolution: {integrity: sha512-SHBoI8/0aoMQWAgUHMQ599VM6ZiSKg8sh/0cFqqlQQMyY9uEplc0ULU5yQNzcvdR4ZKa0ey8+vFmahuRbOCT1A==}
dependencies:
'@types/lodash': 4.14.178
dev: false
/@types/lodash/4.14.178:
resolution: {integrity: sha512-0d5Wd09ItQWH1qFbEyQ7oTQ3GZrMfth5JkbN3EvTKLXcHLRDSXeLnlvlOn0wvxVIwK5o2M8JzP/OWz7T3NRsbw==}
dev: false
/@types/node/14.14.45:
resolution: {integrity: sha512-DssMqTV9UnnoxDWu959sDLZzfvqCF0qDNRjaWeYSui9xkFe61kKo4l1TWNTQONpuXEm+gLMRvdlzvNHBamzmEw==}
dev: false
/@types/node/16.11.19:
resolution: {integrity: sha512-BPAcfDPoHlRQNKktbsbnpACGdypPFBuX4xQlsWDE7B8XXcfII+SpOLay3/qZmCLb39kV5S1RTYwXdkx2lwLYng==}
dev: true
/@types/nprogress/0.2.0:
resolution: {integrity: sha512-1cYJrqq9GezNFPsWTZpFut/d4CjpZqA0vhqDUPFWYKF1oIyBz5qnoYMzR+0C/T96t3ebLAC1SSnwrVOm5/j74A==}
dev: true
/@types/qs/6.9.7:
resolution: {integrity: sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==}
dev: true
/@typescript-eslint/eslint-plugin/5.9.0_bd2fd93dbcc607ad2f21b784bccfe0c8:
resolution: {integrity: sha512-qT4lr2jysDQBQOPsCCvpPUZHjbABoTJW8V9ZzIYKHMfppJtpdtzszDYsldwhFxlhvrp7aCHeXD1Lb9M1zhwWwQ==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
peerDependencies:
'@typescript-eslint/parser': ^5.0.0
eslint: ^6.0.0 || ^7.0.0 || ^8.0.0
typescript: '*'
peerDependenciesMeta:
typescript:
optional: true
dependencies:
'@typescript-eslint/experimental-utils': 5.9.0_eslint@8.6.0+typescript@4.5.4
'@typescript-eslint/parser': 5.9.0_eslint@8.6.0+typescript@4.5.4
'@typescript-eslint/scope-manager': 5.9.0
'@typescript-eslint/type-utils': 5.9.0_eslint@8.6.0+typescript@4.5.4
debug: 4.3.3
eslint: 8.6.0
functional-red-black-tree: 1.0.1
ignore: 5.2.0
regexpp: 3.2.0
semver: 7.3.5
tsutils: 3.21.0_typescript@4.5.4
typescript: 4.5.4
transitivePeerDependencies:
- supports-color
dev: true
/@typescript-eslint/experimental-utils/5.9.0_eslint@8.6.0+typescript@4.5.4:
resolution: {integrity: sha512-ZnLVjBrf26dn7ElyaSKa6uDhqwvAi4jBBmHK1VxuFGPRAxhdi18ubQYSGA7SRiFiES3q9JiBOBHEBStOFkwD2g==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
peerDependencies:
eslint: ^6.0.0 || ^7.0.0 || ^8.0.0
dependencies:
'@types/json-schema': 7.0.9
'@typescript-eslint/scope-manager': 5.9.0
'@typescript-eslint/types': 5.9.0
'@typescript-eslint/typescript-estree': 5.9.0_typescript@4.5.4
eslint: 8.6.0
eslint-scope: 5.1.1
eslint-utils: 3.0.0_eslint@8.6.0
transitivePeerDependencies:
- supports-color
- typescript
dev: true
/@typescript-eslint/parser/5.9.0_eslint@8.6.0+typescript@4.5.4:
resolution: {integrity: sha512-/6pOPz8yAxEt4PLzgbFRDpZmHnXCeZgPDrh/1DaVKOjvn/UPMlWhbx/gA96xRi2JxY1kBl2AmwVbyROUqys5xQ==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
peerDependencies:
eslint: ^6.0.0 || ^7.0.0 || ^8.0.0
typescript: '*'
peerDependenciesMeta:
typescript:
optional: true
dependencies:
'@typescript-eslint/scope-manager': 5.9.0
'@typescript-eslint/types': 5.9.0
'@typescript-eslint/typescript-estree': 5.9.0_typescript@4.5.4
debug: 4.3.3
eslint: 8.6.0
typescript: 4.5.4
transitivePeerDependencies:
- supports-color
dev: true
/@typescript-eslint/scope-manager/5.9.0:
resolution: {integrity: sha512-DKtdIL49Qxk2a8icF6whRk7uThuVz4A6TCXfjdJSwOsf+9ree7vgQWcx0KOyCdk0i9ETX666p4aMhrRhxhUkyg==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
dependencies:
'@typescript-eslint/types': 5.9.0
'@typescript-eslint/visitor-keys': 5.9.0
dev: true
/@typescript-eslint/type-utils/5.9.0_eslint@8.6.0+typescript@4.5.4:
resolution: {integrity: sha512-uVCb9dJXpBrK1071ri5aEW7ZHdDHAiqEjYznF3HSSvAJXyrkxGOw2Ejibz/q6BXdT8lea8CMI0CzKNFTNI6TEQ==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
peerDependencies:
eslint: '*'
typescript: '*'
peerDependenciesMeta:
typescript:
optional: true
dependencies:
'@typescript-eslint/experimental-utils': 5.9.0_eslint@8.6.0+typescript@4.5.4
debug: 4.3.3
eslint: 8.6.0
tsutils: 3.21.0_typescript@4.5.4
typescript: 4.5.4
transitivePeerDependencies:
- supports-color
dev: true
/@typescript-eslint/types/5.9.0:
resolution: {integrity: sha512-mWp6/b56Umo1rwyGCk8fPIzb9Migo8YOniBGPAQDNC6C52SeyNGN4gsVwQTAR+RS2L5xyajON4hOLwAGwPtUwg==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
dev: true
/@typescript-eslint/typescript-estree/5.9.0_typescript@4.5.4:
resolution: {integrity: sha512-kxo3xL2mB7XmiVZcECbaDwYCt3qFXz99tBSuVJR4L/sR7CJ+UNAPrYILILktGj1ppfZ/jNt/cWYbziJUlHl1Pw==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
peerDependencies:
typescript: '*'
peerDependenciesMeta:
typescript:
optional: true
dependencies:
'@typescript-eslint/types': 5.9.0
'@typescript-eslint/visitor-keys': 5.9.0
debug: 4.3.3
globby: 11.1.0
is-glob: 4.0.3
semver: 7.3.5
tsutils: 3.21.0_typescript@4.5.4
typescript: 4.5.4
transitivePeerDependencies:
- supports-color
dev: true
/@typescript-eslint/visitor-keys/5.9.0:
resolution: {integrity: sha512-6zq0mb7LV0ThExKlecvpfepiB+XEtFv/bzx7/jKSgyXTFD7qjmSu1FoiS0x3OZaiS+UIXpH2vd9O89f02RCtgw==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
dependencies:
'@typescript-eslint/types': 5.9.0
eslint-visitor-keys: 3.1.0
dev: true
/@vicons/antd/0.11.0:
resolution: {integrity: sha512-XRkUMJzfjr3Q5S6xbAg4ALI90DIGNhZdbVW6SejFi6EgxB5bts3YMMcM4HbF77vdQX5bhxgju3Hd3uP97OgvAQ==}
dev: true
/@vitejs/plugin-vue-jsx/1.3.3:
resolution: {integrity: sha512-VSBXVqMcxbgX85rgJC1eMWuZ9hzOJhWPvGYlKxyymPokE/i3Gykh5ljkCoNdxnKgIyFqv4WutYoYY93fgjbTxA==}
engines: {node: '>=12.0.0'}
dependencies:
'@babel/core': 7.16.7
'@babel/plugin-syntax-import-meta': 7.10.4_@babel+core@7.16.7
'@babel/plugin-transform-typescript': 7.16.7_@babel+core@7.16.7
'@rollup/pluginutils': 4.1.2
'@vue/babel-plugin-jsx': 1.1.1_@babel+core@7.16.7
hash-sum: 2.0.0
transitivePeerDependencies:
- supports-color
dev: true
/@vitejs/plugin-vue/1.10.2_vite@2.7.10:
resolution: {integrity: sha512-/QJ0Z9qfhAFtKRY+r57ziY4BSbGUTGsPRMpB/Ron3QPwBZM4OZAZHdTa4a8PafCwU5DTatXG8TMDoP8z+oDqJw==}
engines: {node: '>=12.0.0'}
peerDependencies:
vite: ^2.5.10
dependencies:
vite: 2.7.10_sass@1.47.0
dev: true
/@volar/code-gen/0.28.10:
resolution: {integrity: sha512-MybgBubg1im4MiFoiTUMmxKTC+KZJQfIO5g/TVnysEsCr4ssG0lG1rF3Gg3lbQKefdMiqsH5FNuMyqLC/bsWQg==}
dependencies:
'@volar/shared': 0.28.10
'@volar/source-map': 0.28.10
dev: true
/@volar/html2pug/0.28.10:
resolution: {integrity: sha512-orcNnKyUPZZVb7pRvRHU7R8gk4abKZQELT0zXt2T7EbC5B8usmWNav6Sis9kVzV5Etj5h/IYutv7Df7PiKwLOQ==}
dependencies:
domelementtype: 2.2.0
domhandler: 4.3.0
htmlparser2: 7.2.0
pug: 3.0.2
dev: true
/@volar/shared/0.28.10:
resolution: {integrity: sha512-MzBEfBM5E5q4EfOd8Gkqmo+XTfbXiuT8IEWtfmpS8ax3GVeofkeAgzK/TadkatW/Nb2cKOaCYkmILpFKvDnDRQ==}
dependencies:
upath: 2.0.1
vscode-jsonrpc: 8.0.0-next.4
vscode-uri: 3.0.3
dev: true
/@volar/source-map/0.28.10:
resolution: {integrity: sha512-hQ2gclwP7yvZIdaVEC1LixViDPIO6JGkCBxAS8Erg9p2d0ruTyzazfd0NLaLuHLoMnxExILYNK2W05yQmIpRIA==}
dependencies:
'@volar/shared': 0.28.10
dev: true
/@volar/transforms/0.28.10:
resolution: {integrity: sha512-GOQN3amI733oFweKKjuBBOEOMwy0e/aEAnnJNavrrHa7LY6Ke/JfNsoWhi9Pb2FAPYd+WyruDDFX8yKHjQE1xw==}
dependencies:
'@volar/shared': 0.28.10
vscode-languageserver: 8.0.0-next.5
dev: true
/@vscode/emmet-helper/2.8.3:
resolution: {integrity: sha512-dkTSL+BaBBS8gFgPm/GMOU+XfxaMyI+Fl1IUYxEi8Iv24RfHf9/q2eCpV2hs7sncLcoKWEbMYe5gv4Ppmp2Oxw==}
dependencies:
emmet: 2.3.5
jsonc-parser: 2.3.1
vscode-languageserver-textdocument: 1.0.3
vscode-languageserver-types: 3.16.0
vscode-nls: 5.0.0
vscode-uri: 2.1.2
dev: true
/@vue/babel-helper-vue-transform-on/1.0.2:
resolution: {integrity: sha512-hz4R8tS5jMn8lDq6iD+yWL6XNB699pGIVLk7WSJnn1dbpjaazsjZQkieJoRX6gW5zpYSCFqQ7jUquPNY65tQYA==}
dev: true
/@vue/babel-plugin-jsx/1.1.1_@babel+core@7.16.7:
resolution: {integrity: sha512-j2uVfZjnB5+zkcbc/zsOc0fSNGCMMjaEXP52wdwdIfn0qjFfEYpYZBFKFg+HHnQeJCVrjOeO0YxgaL7DMrym9w==}
dependencies:
'@babel/helper-module-imports': 7.16.7
'@babel/plugin-syntax-jsx': 7.16.7_@babel+core@7.16.7
'@babel/template': 7.16.7
'@babel/traverse': 7.16.7
'@babel/types': 7.16.7
'@vue/babel-helper-vue-transform-on': 1.0.2
camelcase: 6.3.0
html-tags: 3.1.0
svg-tags: 1.0.0
transitivePeerDependencies:
- '@babel/core'
- supports-color
dev: true
/@vue/compiler-core/3.2.26:
resolution: {integrity: sha512-N5XNBobZbaASdzY9Lga2D9Lul5vdCIOXvUMd6ThcN8zgqQhPKfCV+wfAJNNJKQkSHudnYRO2gEB+lp0iN3g2Tw==}
dependencies:
'@babel/parser': 7.16.7
'@vue/shared': 3.2.26
estree-walker: 2.0.2
source-map: 0.6.1
/@vue/compiler-dom/3.2.26:
resolution: {integrity: sha512-smBfaOW6mQDxcT3p9TKT6mE22vjxjJL50GFVJiI0chXYGU/xzC05QRGrW3HHVuJrmLTLx5zBhsZ2dIATERbarg==}
dependencies:
'@vue/compiler-core': 3.2.26
'@vue/shared': 3.2.26
/@vue/compiler-sfc/3.2.26:
resolution: {integrity: sha512-ePpnfktV90UcLdsDQUh2JdiTuhV0Skv2iYXxfNMOK/F3Q+2BO0AulcVcfoksOpTJGmhhfosWfMyEaEf0UaWpIw==}
dependencies:
'@babel/parser': 7.16.7
'@vue/compiler-core': 3.2.26
'@vue/compiler-dom': 3.2.26
'@vue/compiler-ssr': 3.2.26
'@vue/reactivity-transform': 3.2.26
'@vue/shared': 3.2.26
estree-walker: 2.0.2
magic-string: 0.25.7
postcss: 8.4.5
source-map: 0.6.1
dev: false
/@vue/compiler-ssr/3.2.26:
resolution: {integrity: sha512-2mywLX0ODc4Zn8qBoA2PDCsLEZfpUGZcyoFRLSOjyGGK6wDy2/5kyDOWtf0S0UvtoyVq95OTSGIALjZ4k2q/ag==}
dependencies:
'@vue/compiler-dom': 3.2.26
'@vue/shared': 3.2.26
dev: false
/@vue/devtools-api/6.0.0-beta.21.1:
resolution: {integrity: sha512-FqC4s3pm35qGVeXRGOjTsRzlkJjrBLriDS9YXbflHLsfA9FrcKzIyWnLXoNm+/7930E8rRakXuAc2QkC50swAw==}
dev: false
/@vue/reactivity-transform/3.2.26:
resolution: {integrity: sha512-XKMyuCmzNA7nvFlYhdKwD78rcnmPb7q46uoR00zkX6yZrUmcCQ5OikiwUEVbvNhL5hBJuvbSO95jB5zkUon+eQ==}
dependencies:
'@babel/parser': 7.16.7
'@vue/compiler-core': 3.2.26
'@vue/shared': 3.2.26
estree-walker: 2.0.2
magic-string: 0.25.7
dev: false
/@vue/reactivity/3.2.26:
resolution: {integrity: sha512-h38bxCZLW6oFJVDlCcAiUKFnXI8xP8d+eO0pcDxx+7dQfSPje2AO6M9S9QO6MrxQB7fGP0DH0dYQ8ksf6hrXKQ==}
dependencies:
'@vue/shared': 3.2.26
/@vue/runtime-core/3.2.26:
resolution: {integrity: sha512-BcYi7qZ9Nn+CJDJrHQ6Zsmxei2hDW0L6AB4vPvUQGBm2fZyC0GXd/4nVbyA2ubmuhctD5RbYY8L+5GUJszv9mQ==}
dependencies:
'@vue/reactivity': 3.2.26
'@vue/shared': 3.2.26
dev: false
/@vue/runtime-dom/3.2.26:
resolution: {integrity: sha512-dY56UIiZI+gjc4e8JQBwAifljyexfVCkIAu/WX8snh8vSOt/gMSEGwPRcl2UpYpBYeyExV8WCbgvwWRNt9cHhQ==}
dependencies:
'@vue/runtime-core': 3.2.26
'@vue/shared': 3.2.26
csstype: 2.6.19
dev: false
/@vue/server-renderer/3.2.26_vue@3.2.26:
resolution: {integrity: sha512-Jp5SggDUvvUYSBIvYEhy76t4nr1vapY/FIFloWmQzn7UxqaHrrBpbxrqPcTrSgGrcaglj0VBp22BKJNre4aA1w==}
peerDependencies:
vue: 3.2.26
dependencies:
'@vue/compiler-ssr': 3.2.26
'@vue/shared': 3.2.26
vue: 3.2.26
dev: false
/@vue/shared/3.2.26:
resolution: {integrity: sha512-vPV6Cq+NIWbH5pZu+V+2QHE9y1qfuTq49uNWw4f7FDEeZaDU2H2cx5jcUZOAKW7qTrUS4k6qZPbMy1x4N96nbA==}
/@vueuse/core/7.5.3_vue@3.2.26:
resolution: {integrity: sha512-D9j5ymHFMFRXQqCp0yZJkf/bvBGiz0MrKUa364p+L8dMyd5zyq2K1JmHyvoBd4xbTFRfmQ1h878u6YE5LCkDVQ==}
peerDependencies:
'@vue/composition-api': ^1.1.0
vue: ^2.6.0 || ^3.2.0
peerDependenciesMeta:
'@vue/composition-api':
optional: true
vue:
optional: true
dependencies:
'@vueuse/shared': 7.5.3_vue@3.2.26
vue: 3.2.26
vue-demi: 0.12.1_vue@3.2.26
dev: false
/@vueuse/shared/7.5.3_vue@3.2.26:
resolution: {integrity: sha512-BJ71cxHN5VByW1S58Gl85NFJaQu93F7Vs7K/MuAKsIIuHm9PBbkR5Vxkg9ko9cBdiKVt+FNoo13BhdbA+Vwycg==}
peerDependencies:
'@vue/composition-api': ^1.1.0
vue: ^2.6.0 || ^3.2.0
peerDependenciesMeta:
'@vue/composition-api':
optional: true
vue:
optional: true
dependencies:
vue: 3.2.26
vue-demi: 0.12.1_vue@3.2.26
dev: false
/acorn-jsx/5.3.2_acorn@8.7.0:
resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==}
peerDependencies:
acorn: ^6.0.0 || ^7.0.0 || ^8.0.0
dependencies:
acorn: 8.7.0
dev: true
/acorn/7.4.1:
resolution: {integrity: sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==}
engines: {node: '>=0.4.0'}
hasBin: true
dev: true
/acorn/8.7.0:
resolution: {integrity: sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==}
engines: {node: '>=0.4.0'}
hasBin: true
dev: true
/ajv/6.12.6:
resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==}
dependencies:
fast-deep-equal: 3.1.3
fast-json-stable-stringify: 2.1.0
json-schema-traverse: 0.4.1
uri-js: 4.4.1
dev: true
/ansi-colors/4.1.1:
resolution: {integrity: sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==}
engines: {node: '>=6'}
dev: true
/ansi-regex/5.0.1:
resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==}
engines: {node: '>=8'}
/ansi-styles/3.2.1:
resolution: {integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==}
engines: {node: '>=4'}
dependencies:
color-convert: 1.9.3
dev: true
/ansi-styles/4.3.0:
resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==}
engines: {node: '>=8'}
dependencies:
color-convert: 2.0.1
/ansi-styles/5.2.0:
resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==}
engines: {node: '>=10'}
dev: false
/anymatch/3.1.2:
resolution: {integrity: sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==}
engines: {node: '>= 8'}
dependencies:
normalize-path: 3.0.0
picomatch: 2.3.1
dev: true
/argparse/2.0.1:
resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==}
dev: true
/array-union/2.1.0:
resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==}
engines: {node: '>=8'}
dev: true
/asap/2.0.6:
resolution: {integrity: sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY=}
dev: true
/assert-never/1.2.1:
resolution: {integrity: sha512-TaTivMB6pYI1kXwrFlEhLeGfOqoDNdTxjCdwRfFFkEA30Eu+k48W34nlok2EYWJfFFzqaEmichdNM7th6M5HNw==}
dev: true
/async-validator/4.0.7:
resolution: {integrity: sha512-Pj2IR7u8hmUEDOwB++su6baaRi+QvsgajuFB9j95foM1N2gy5HM4z60hfusIO0fBPG5uLAEl6yCJr1jNSVugEQ==}
dev: false
/atob/2.1.2:
resolution: {integrity: sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==}
engines: {node: '>= 4.5.0'}
hasBin: true
dev: true
/axios/0.24.0:
resolution: {integrity: sha512-Q6cWsys88HoPgAaFAVUb0WpPk0O8iTeisR9IMqy9G8AbO4NlpVknrnQS03zzF9PGAWgO3cgletO3VjV/P7VztA==}
dependencies:
follow-redirects: 1.14.6
transitivePeerDependencies:
- debug
dev: false
/babel-walk/3.0.0-canary-5:
resolution: {integrity: sha512-GAwkz0AihzY5bkwIY5QDR+LvsRQgB/B+1foMPvi0FZPMl5fjD7ICiznUiBdLYMH1QYe6vqu4gWYytZOccLouFw==}
engines: {node: '>= 10.0.0'}
dependencies:
'@babel/types': 7.16.7
dev: true
/balanced-match/1.0.2:
resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
dev: true
/big.js/3.2.0:
resolution: {integrity: sha512-+hN/Zh2D08Mx65pZ/4g5bsmNiZUuChDiQfTUQ7qJr4/kuopCr88xZsAXv6mBoZEsUI4OuGHlX59qE94K2mMW8Q==}
dev: true
/binary-extensions/2.2.0:
resolution: {integrity: sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==}
engines: {node: '>=8'}
dev: true
/brace-expansion/1.1.11:
resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==}
dependencies:
balanced-match: 1.0.2
concat-map: 0.0.1
dev: true
/braces/3.0.2:
resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==}
engines: {node: '>=8'}
dependencies:
fill-range: 7.0.1
dev: true
/browserslist/4.19.1:
resolution: {integrity: sha512-u2tbbG5PdKRTUoctO3NBD8FQ5HdPh1ZXPHzp1rwaa5jTc+RV9/+RlWiAIKmjRPQF+xbGM9Kklj5bZQFa2s/38A==}
engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7}
hasBin: true
dependencies:
caniuse-lite: 1.0.30001297
electron-to-chromium: 1.4.38
escalade: 3.1.1
node-releases: 2.0.1
picocolors: 1.0.0
dev: true
/call-bind/1.0.2:
resolution: {integrity: sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==}
dependencies:
function-bind: 1.1.1
get-intrinsic: 1.1.1
/callsites/3.1.0:
resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==}
engines: {node: '>=6'}
dev: true
/camelcase/6.3.0:
resolution: {integrity: sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==}
engines: {node: '>=10'}
dev: true
/caniuse-lite/1.0.30001297:
resolution: {integrity: sha512-6bbIbowYG8vFs/Lk4hU9jFt7NknGDleVAciK916tp6ft1j+D//ZwwL6LbF1wXMQ32DMSjeuUV8suhh6dlmFjcA==}
dev: true
/chalk/2.4.2:
resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==}
engines: {node: '>=4'}
dependencies:
ansi-styles: 3.2.1
escape-string-regexp: 1.0.5
supports-color: 5.5.0
dev: true
/chalk/4.1.2:
resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==}
engines: {node: '>=10'}
dependencies:
ansi-styles: 4.3.0
supports-color: 7.2.0
/character-parser/2.2.0:
resolution: {integrity: sha1-x84o821LzZdE5f/CxfzeHHMmH8A=}
dependencies:
is-regex: 1.1.4
dev: true
/chokidar/3.5.2:
resolution: {integrity: sha512-ekGhOnNVPgT77r4K/U3GDhu+FQ2S8TnK/s2KbIGXi0SZWuwkZ2QNyfWdZW+TVfn84DpEP7rLeCt2UI6bJ8GwbQ==}
engines: {node: '>= 8.10.0'}
dependencies:
anymatch: 3.1.2
braces: 3.0.2
glob-parent: 5.1.2
is-binary-path: 2.1.0
is-glob: 4.0.3
normalize-path: 3.0.0
readdirp: 3.6.0
optionalDependencies:
fsevents: 2.3.2
dev: true
/color-convert/1.9.3:
resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==}
dependencies:
color-name: 1.1.3
dev: true
/color-convert/2.0.1:
resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==}
engines: {node: '>=7.0.0'}
dependencies:
color-name: 1.1.4
/color-name/1.1.3:
resolution: {integrity: sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=}
dev: true
/color-name/1.1.4:
resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==}
/concat-map/0.0.1:
resolution: {integrity: sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=}
dev: true
/constantinople/4.0.1:
resolution: {integrity: sha512-vCrqcSIq4//Gx74TXXCGnHpulY1dskqLTFGDmhrGxzeXL8lF8kvXv6mpNWlJj1uD4DW23D4ljAqbY4RRaaUZIw==}
dependencies:
'@babel/parser': 7.16.7
'@babel/types': 7.16.7
dev: true
/convert-source-map/1.8.0:
resolution: {integrity: sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==}
dependencies:
safe-buffer: 5.1.2
dev: true
/copy-anything/2.0.3:
resolution: {integrity: sha512-GK6QUtisv4fNS+XcI7shX0Gx9ORg7QqIznyfho79JTnX1XhLiyZHfftvGiziqzRiEi/Bjhgpi+D2o7HxJFPnDQ==}
dependencies:
is-what: 3.14.1
dev: true
/cross-spawn/7.0.3:
resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==}
engines: {node: '>= 8'}
dependencies:
path-key: 3.1.1
shebang-command: 2.0.0
which: 2.0.2
dev: true
/css-parse/2.0.0:
resolution: {integrity: sha1-pGjuZnwW2BzPBcWMONKpfHgNv9Q=}
dependencies:
css: 2.2.4
dev: true
/css-render/0.15.8:
resolution: {integrity: sha512-k1gp1MgYDPrFZhzheQkSwm6dmP6nPe2XE6WYpJBPwEc3GbMANPJZfxl7ofZlTl8/+tpMRiGTTgUkTlXaVbLxog==}
dependencies:
'@emotion/hash': 0.8.0
'@types/node': 14.14.45
csstype: 3.0.10
dev: false
/css-selector-tokenizer/0.7.3:
resolution: {integrity: sha512-jWQv3oCEL5kMErj4wRnK/OPoBi0D+P1FR2cDCKYPaMeD2eW3/mttav8HT4hT1CKopiJI/psEULjkClhvJo4Lvg==}
dependencies:
cssesc: 3.0.0
fastparse: 1.1.2
dev: true
/css/2.2.4:
resolution: {integrity: sha512-oUnjmWpy0niI3x/mPL8dVEI1l7MnG3+HHyRPHf+YFSbK+svOhXpmSOcDURUh2aOCgl2grzrOPt1nHLuCVFULLw==}
dependencies:
inherits: 2.0.4
source-map: 0.6.1
source-map-resolve: 0.5.3
urix: 0.1.0
dev: true
/cssesc/3.0.0:
resolution: {integrity: sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==}
engines: {node: '>=4'}
hasBin: true
dev: true
/csstype/2.6.19:
resolution: {integrity: sha512-ZVxXaNy28/k3kJg0Fou5MiYpp88j7H9hLZp8PDC3jV0WFjfH5E9xHb56L0W59cPbKbcHXeP4qyT8PrHp8t6LcQ==}
dev: false
/csstype/3.0.10:
resolution: {integrity: sha512-2u44ZG2OcNUO9HDp/Jl8C07x6pU/eTR3ncV91SiK3dhG9TWvRVsCoJw14Ckx5DgWkzGA3waZWO3d7pgqpUI/XA==}
dev: false
/dart-sass/1.25.0:
resolution: {integrity: sha512-syNOAstJXAmvD3RifcDk3fiPMyYE2fY8so6w9gf2/wNlKpG0zyH+oiXubEYVOy1WAWkzOc72pbAxwx+3OU4JJA==}
engines: {node: '>=8.9.0'}
deprecated: This package has been renamed to 'sass'.
hasBin: true
dependencies:
chokidar: 3.5.2
dev: true
/date-fns-tz/1.2.2_date-fns@2.28.0:
resolution: {integrity: sha512-vWtn44eEqnLbkACb7T5G5gPgKR4nY8NkNMOCyoY49NsRGHrcDmY2aysCyzDeA+u+vcDBn/w6nQqEDyouRs4m8w==}
peerDependencies:
date-fns: '>=2.0.0'
dependencies:
date-fns: 2.28.0
dev: false
/date-fns/2.28.0:
resolution: {integrity: sha512-8d35hViGYx/QH0icHYCeLmsLmMUheMmTyV9Fcm6gvNwdw31yXXH+O85sOBJ+OLnLQMKZowvpKb6FgMIQjcpvQw==}
engines: {node: '>=0.11'}
dev: false
/debug/3.1.0:
resolution: {integrity: sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==}
dependencies:
ms: 2.0.0
dev: true
/debug/3.2.7:
resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==}
dependencies:
ms: 2.1.3
dev: true
optional: true
/debug/4.3.3:
resolution: {integrity: sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==}
engines: {node: '>=6.0'}
peerDependencies:
supports-color: '*'
peerDependenciesMeta:
supports-color:
optional: true
dependencies:
ms: 2.1.2
dev: true
/decode-uri-component/0.2.0:
resolution: {integrity: sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU=}
engines: {node: '>=0.10'}
dev: true
/deep-is/0.1.4:
resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==}
dev: true
/diff-sequences/27.4.0:
resolution: {integrity: sha512-YqiQzkrsmHMH5uuh8OdQFU9/ZpADnwzml8z0O5HvRNda+5UZsaX/xN+AAxfR2hWq1Y7HZnAzO9J5lJXOuDz2Ww==}
engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0}
dev: false
/dir-glob/3.0.1:
resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==}
engines: {node: '>=8'}
dependencies:
path-type: 4.0.0
dev: true
/doctrine/3.0.0:
resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==}
engines: {node: '>=6.0.0'}
dependencies:
esutils: 2.0.3
dev: true
/doctypes/1.1.0:
resolution: {integrity: sha1-6oCxBqh1OHdOijpKWv4pPeSJ4Kk=}
dev: true
/dom-serializer/1.3.2:
resolution: {integrity: sha512-5c54Bk5Dw4qAxNOI1pFEizPSjVsx5+bpJKmL2kPn8JhBUq2q09tTCa3mjijun2NfK78NMouDYNMBkOrPZiS+ig==}
dependencies:
domelementtype: 2.2.0
domhandler: 4.3.0
entities: 2.2.0
dev: true
/domelementtype/2.2.0:
resolution: {integrity: sha512-DtBMo82pv1dFtUmHyr48beiuq792Sxohr+8Hm9zoxklYPfa6n0Z3Byjj2IV7bmr2IyqClnqEQhfgHJJ5QF0R5A==}
dev: true
/domhandler/4.3.0:
resolution: {integrity: sha512-fC0aXNQXqKSFTr2wDNZDhsEYjCiYsDWl3D01kwt25hm1YIPyDGHvvi3rw+PLqHAl/m71MaiF7d5zvBr0p5UB2g==}
engines: {node: '>= 4'}
dependencies:
domelementtype: 2.2.0
dev: true
/domutils/2.8.0:
resolution: {integrity: sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==}
dependencies:
dom-serializer: 1.3.2
domelementtype: 2.2.0
domhandler: 4.3.0
dev: true
/dotenv/10.0.0:
resolution: {integrity: sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q==}
engines: {node: '>=10'}
dev: true
/echarts/5.2.2:
resolution: {integrity: sha512-yxuBfeIH5c+0FsoRP60w4De6omXhA06c7eUYBsC1ykB6Ys2yK5fSteIYWvkJ4xJVLQgCvAdO8C4mN6MLeJpBaw==}
dependencies:
tslib: 2.3.0
zrender: 5.2.1
dev: false
/electron-to-chromium/1.4.38:
resolution: {integrity: sha512-WhHt3sZazKj0KK/UpgsbGQnUUoFeAHVishzHFExMxagpZgjiGYSC9S0ZlbhCfSH2L2i+2A1yyqOIliTctMx7KQ==}
dev: true
/emmet/2.3.5:
resolution: {integrity: sha512-LcWfTamJnXIdMfLvJEC5Ld3hY5/KHXgv1L1bp6I7eEvB0ZhacHZ1kX0BYovJ8FroEsreLcq7n7kZhRMsf6jkXQ==}
dependencies:
'@emmetio/abbreviation': 2.2.2
'@emmetio/css-abbreviation': 2.1.4
dev: true
/emojis-list/2.1.0:
resolution: {integrity: sha1-TapNnbAPmBmIDHn6RXrlsJof04k=}
engines: {node: '>= 0.10'}
dev: true
/enquirer/2.3.6:
resolution: {integrity: sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==}
engines: {node: '>=8.6'}
dependencies:
ansi-colors: 4.1.1
dev: true
/entities/2.2.0:
resolution: {integrity: sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==}
dev: true
/entities/3.0.1:
resolution: {integrity: sha512-WiyBqoomrwMdFG1e0kqvASYfnlb0lp8M5o5Fw2OFq1hNZxxcNk8Ik0Xm7LxzBhuidnZB/UtBqVCgUz3kBOP51Q==}
engines: {node: '>=0.12'}
dev: true
/errno/0.1.8:
resolution: {integrity: sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A==}
hasBin: true
requiresBuild: true
dependencies:
prr: 1.0.1
dev: true
optional: true
/esbuild-android-arm64/0.13.15:
resolution: {integrity: sha512-m602nft/XXeO8YQPUDVoHfjyRVPdPgjyyXOxZ44MK/agewFFkPa8tUo6lAzSWh5Ui5PB4KR9UIFTSBKh/RrCmg==}
cpu: [arm64]
os: [android]
requiresBuild: true
dev: true
optional: true
/esbuild-darwin-64/0.13.15:
resolution: {integrity: sha512-ihOQRGs2yyp7t5bArCwnvn2Atr6X4axqPpEdCFPVp7iUj4cVSdisgvEKdNR7yH3JDjW6aQDw40iQFoTqejqxvQ==}
cpu: [x64]
os: [darwin]
requiresBuild: true
dev: true
optional: true
/esbuild-darwin-arm64/0.13.15:
resolution: {integrity: sha512-i1FZssTVxUqNlJ6cBTj5YQj4imWy3m49RZRnHhLpefFIh0To05ow9DTrXROTE1urGTQCloFUXTX8QfGJy1P8dQ==}
cpu: [arm64]
os: [darwin]
requiresBuild: true
dev: true
optional: true
/esbuild-freebsd-64/0.13.15:
resolution: {integrity: sha512-G3dLBXUI6lC6Z09/x+WtXBXbOYQZ0E8TDBqvn7aMaOCzryJs8LyVXKY4CPnHFXZAbSwkCbqiPuSQ1+HhrNk7EA==}
cpu: [x64]
os: [freebsd]
requiresBuild: true
dev: true
optional: true
/esbuild-freebsd-arm64/0.13.15:
resolution: {integrity: sha512-KJx0fzEDf1uhNOZQStV4ujg30WlnwqUASaGSFPhznLM/bbheu9HhqZ6mJJZM32lkyfGJikw0jg7v3S0oAvtvQQ==}
cpu: [arm64]
os: [freebsd]
requiresBuild: true
dev: true
optional: true
/esbuild-linux-32/0.13.15:
resolution: {integrity: sha512-ZvTBPk0YWCLMCXiFmD5EUtB30zIPvC5Itxz0mdTu/xZBbbHJftQgLWY49wEPSn2T/TxahYCRDWun5smRa0Tu+g==}
cpu: [ia32]
os: [linux]
requiresBuild: true
dev: true
optional: true
/esbuild-linux-64/0.13.15:
resolution: {integrity: sha512-eCKzkNSLywNeQTRBxJRQ0jxRCl2YWdMB3+PkWFo2BBQYC5mISLIVIjThNtn6HUNqua1pnvgP5xX0nHbZbPj5oA==}
cpu: [x64]
os: [linux]
requiresBuild: true
dev: true
optional: true
/esbuild-linux-arm/0.13.15:
resolution: {integrity: sha512-wUHttDi/ol0tD8ZgUMDH8Ef7IbDX+/UsWJOXaAyTdkT7Yy9ZBqPg8bgB/Dn3CZ9SBpNieozrPRHm0BGww7W/jA==}
cpu: [arm]
os: [linux]
requiresBuild: true
dev: true
optional: true
/esbuild-linux-arm64/0.13.15:
resolution: {integrity: sha512-bYpuUlN6qYU9slzr/ltyLTR9YTBS7qUDymO8SV7kjeNext61OdmqFAzuVZom+OLW1HPHseBfJ/JfdSlx8oTUoA==}
cpu: [arm64]
os: [linux]
requiresBuild: true
dev: true
optional: true
/esbuild-linux-mips64le/0.13.15:
resolution: {integrity: sha512-KlVjIG828uFPyJkO/8gKwy9RbXhCEUeFsCGOJBepUlpa7G8/SeZgncUEz/tOOUJTcWMTmFMtdd3GElGyAtbSWg==}
cpu: [mips64el]
os: [linux]
requiresBuild: true
dev: true
optional: true
/esbuild-linux-ppc64le/0.13.15:
resolution: {integrity: sha512-h6gYF+OsaqEuBjeesTBtUPw0bmiDu7eAeuc2OEH9S6mV9/jPhPdhOWzdeshb0BskRZxPhxPOjqZ+/OqLcxQwEQ==}
cpu: [ppc64]
os: [linux]
requiresBuild: true
dev: true
optional: true
/esbuild-netbsd-64/0.13.15:
resolution: {integrity: sha512-3+yE9emwoevLMyvu+iR3rsa+Xwhie7ZEHMGDQ6dkqP/ndFzRHkobHUKTe+NCApSqG5ce2z4rFu+NX/UHnxlh3w==}
cpu: [x64]
os: [netbsd]
requiresBuild: true
dev: true
optional: true
/esbuild-openbsd-64/0.13.15:
resolution: {integrity: sha512-wTfvtwYJYAFL1fSs8yHIdf5GEE4NkbtbXtjLWjM3Cw8mmQKqsg8kTiqJ9NJQe5NX/5Qlo7Xd9r1yKMMkHllp5g==}
cpu: [x64]
os: [openbsd]
requiresBuild: true
dev: true
optional: true
/esbuild-sunos-64/0.13.15:
resolution: {integrity: sha512-lbivT9Bx3t1iWWrSnGyBP9ODriEvWDRiweAs69vI+miJoeKwHWOComSRukttbuzjZ8r1q0mQJ8Z7yUsDJ3hKdw==}
cpu: [x64]
os: [sunos]
requiresBuild: true
dev: true
optional: true
/esbuild-windows-32/0.13.15:
resolution: {integrity: sha512-fDMEf2g3SsJ599MBr50cY5ve5lP1wyVwTe6aLJsM01KtxyKkB4UT+fc5MXQFn3RLrAIAZOG+tHC+yXObpSn7Nw==}
cpu: [ia32]
os: [win32]
requiresBuild: true
dev: true
optional: true
/esbuild-windows-64/0.13.15:
resolution: {integrity: sha512-9aMsPRGDWCd3bGjUIKG/ZOJPKsiztlxl/Q3C1XDswO6eNX/Jtwu4M+jb6YDH9hRSUflQWX0XKAfWzgy5Wk54JQ==}
cpu: [x64]
os: [win32]
requiresBuild: true
dev: true
optional: true
/esbuild-windows-arm64/0.13.15:
resolution: {integrity: sha512-zzvyCVVpbwQQATaf3IG8mu1IwGEiDxKkYUdA4FpoCHi1KtPa13jeScYDjlW0Qh+ebWzpKfR2ZwvqAQkSWNcKjA==}
cpu: [arm64]
os: [win32]
requiresBuild: true
dev: true
optional: true
/esbuild/0.13.15:
resolution: {integrity: sha512-raCxt02HBKv8RJxE8vkTSCXGIyKHdEdGfUmiYb8wnabnaEmHzyW7DCHb5tEN0xU8ryqg5xw54mcwnYkC4x3AIw==}
hasBin: true
requiresBuild: true
optionalDependencies:
esbuild-android-arm64: 0.13.15
esbuild-darwin-64: 0.13.15
esbuild-darwin-arm64: 0.13.15
esbuild-freebsd-64: 0.13.15
esbuild-freebsd-arm64: 0.13.15
esbuild-linux-32: 0.13.15
esbuild-linux-64: 0.13.15
esbuild-linux-arm: 0.13.15
esbuild-linux-arm64: 0.13.15
esbuild-linux-mips64le: 0.13.15
esbuild-linux-ppc64le: 0.13.15
esbuild-netbsd-64: 0.13.15
esbuild-openbsd-64: 0.13.15
esbuild-sunos-64: 0.13.15
esbuild-windows-32: 0.13.15
esbuild-windows-64: 0.13.15
esbuild-windows-arm64: 0.13.15
dev: true
/escalade/3.1.1:
resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==}
engines: {node: '>=6'}
dev: true
/escape-string-regexp/1.0.5:
resolution: {integrity: sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=}
engines: {node: '>=0.8.0'}
dev: true
/escape-string-regexp/4.0.0:
resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==}
engines: {node: '>=10'}
dev: true
/eslint-config-prettier/8.3.0_eslint@8.6.0:
resolution: {integrity: sha512-BgZuLUSeKzvlL/VUjx/Yb787VQ26RU3gGjA3iiFvdsp/2bMfVIWUVP7tjxtjS0e+HP409cPlPvNkQloz8C91ew==}
hasBin: true
peerDependencies:
eslint: '>=7.0.0'
dependencies:
eslint: 8.6.0
dev: true
/eslint-plugin-prettier/4.0.0_1c588f61426b1faf18812943f1678311:
resolution: {integrity: sha512-98MqmCJ7vJodoQK359bqQWaxOE0CS8paAz/GgjaZLyex4TTk3g9HugoO89EqWCrFiOqn9EVvcoo7gZzONCWVwQ==}
engines: {node: '>=6.0.0'}
peerDependencies:
eslint: '>=7.28.0'
eslint-config-prettier: '*'
prettier: '>=2.0.0'
peerDependenciesMeta:
eslint-config-prettier:
optional: true
dependencies:
eslint: 8.6.0
eslint-config-prettier: 8.3.0_eslint@8.6.0
prettier: 2.5.1
prettier-linter-helpers: 1.0.0
dev: true
/eslint-plugin-vue/8.2.0_eslint@8.6.0:
resolution: {integrity: sha512-cLIdTuOAMXyHeQ4drYKcZfoyzdwdBpH279X8/N0DgmotEI9yFKb5O/cAgoie/CkQZCH/MOmh0xw/KEfS90zY2A==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
peerDependencies:
eslint: ^6.2.0 || ^7.0.0 || ^8.0.0
dependencies:
eslint: 8.6.0
eslint-utils: 3.0.0_eslint@8.6.0
natural-compare: 1.4.0
semver: 7.3.5
vue-eslint-parser: 8.0.1_eslint@8.6.0
transitivePeerDependencies:
- supports-color
dev: true
/eslint-scope/5.1.1:
resolution: {integrity: sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==}
engines: {node: '>=8.0.0'}
dependencies:
esrecurse: 4.3.0
estraverse: 4.3.0
dev: true
/eslint-scope/6.0.0:
resolution: {integrity: sha512-uRDL9MWmQCkaFus8RF5K9/L/2fn+80yoW3jkD53l4shjCh26fCtvJGasxjUqP5OT87SYTxCVA3BwTUzuELx9kA==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
dependencies:
esrecurse: 4.3.0
estraverse: 5.3.0
dev: true
/eslint-scope/7.1.0:
resolution: {integrity: sha512-aWwkhnS0qAXqNOgKOK0dJ2nvzEbhEvpy8OlJ9kZ0FeZnA6zpjv1/Vei+puGFFX7zkPCkHHXb7IDX3A+7yPrRWg==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
dependencies:
esrecurse: 4.3.0
estraverse: 5.3.0
dev: true
/eslint-utils/3.0.0_eslint@8.6.0:
resolution: {integrity: sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==}
engines: {node: ^10.0.0 || ^12.0.0 || >= 14.0.0}
peerDependencies:
eslint: '>=5'
dependencies:
eslint: 8.6.0
eslint-visitor-keys: 2.1.0
dev: true
/eslint-visitor-keys/2.1.0:
resolution: {integrity: sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==}
engines: {node: '>=10'}
dev: true
/eslint-visitor-keys/3.1.0:
resolution: {integrity: sha512-yWJFpu4DtjsWKkt5GeNBBuZMlNcYVs6vRCLoCVEJrTjaSB6LC98gFipNK/erM2Heg/E8mIK+hXG/pJMLK+eRZA==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
dev: true
/eslint/8.6.0:
resolution: {integrity: sha512-UvxdOJ7mXFlw7iuHZA4jmzPaUqIw54mZrv+XPYKNbKdLR0et4rf60lIZUU9kiNtnzzMzGWxMV+tQ7uG7JG8DPw==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
hasBin: true
dependencies:
'@eslint/eslintrc': 1.0.5
'@humanwhocodes/config-array': 0.9.2
ajv: 6.12.6
chalk: 4.1.2
cross-spawn: 7.0.3
debug: 4.3.3
doctrine: 3.0.0
enquirer: 2.3.6
escape-string-regexp: 4.0.0
eslint-scope: 7.1.0
eslint-utils: 3.0.0_eslint@8.6.0
eslint-visitor-keys: 3.1.0
espree: 9.3.0
esquery: 1.4.0
esutils: 2.0.3
fast-deep-equal: 3.1.3
file-entry-cache: 6.0.1
functional-red-black-tree: 1.0.1
glob-parent: 6.0.2
globals: 13.12.0
ignore: 4.0.6
import-fresh: 3.3.0
imurmurhash: 0.1.4
is-glob: 4.0.3
js-yaml: 4.1.0
json-stable-stringify-without-jsonify: 1.0.1
levn: 0.4.1
lodash.merge: 4.6.2
minimatch: 3.0.4
natural-compare: 1.4.0
optionator: 0.9.1
progress: 2.0.3
regexpp: 3.2.0
semver: 7.3.5
strip-ansi: 6.0.1
strip-json-comments: 3.1.1
text-table: 0.2.0
v8-compile-cache: 2.3.0
transitivePeerDependencies:
- supports-color
dev: true
/espree/9.3.0:
resolution: {integrity: sha512-d/5nCsb0JcqsSEeQzFZ8DH1RmxPcglRWh24EFTlUEmCKoehXGdpsx0RkHDubqUI8LSAIKMQp4r9SzQ3n+sm4HQ==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
dependencies:
acorn: 8.7.0
acorn-jsx: 5.3.2_acorn@8.7.0
eslint-visitor-keys: 3.1.0
dev: true
/esquery/1.4.0:
resolution: {integrity: sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==}
engines: {node: '>=0.10'}
dependencies:
estraverse: 5.3.0
dev: true
/esrecurse/4.3.0:
resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==}
engines: {node: '>=4.0'}
dependencies:
estraverse: 5.3.0
dev: true
/estraverse/4.3.0:
resolution: {integrity: sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==}
engines: {node: '>=4.0'}
dev: true
/estraverse/5.3.0:
resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==}
engines: {node: '>=4.0'}
dev: true
/estree-walker/2.0.2:
resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==}
/esutils/2.0.3:
resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==}
engines: {node: '>=0.10.0'}
dev: true
/evtd/0.2.3:
resolution: {integrity: sha512-tmiT1YUVqFjTY+BSBOAskL83xNx41iUfpvKP6Gcd/xMHjg3mnER98jXGXJyKnxCG19uPc6EhZiUC+MUyvoqCtw==}
dev: false
/fast-deep-equal/3.1.3:
resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==}
dev: true
/fast-diff/1.2.0:
resolution: {integrity: sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==}
dev: true
/fast-glob/3.2.9:
resolution: {integrity: sha512-MBwILhhD92sziIrMQwpqcuGERF+BH99ei2a3XsGJuqEKcSycAL+w0HWokFenZXona+kjFr82Lf71eTxNRC06XQ==}
engines: {node: '>=8.6.0'}
dependencies:
'@nodelib/fs.stat': 2.0.5
'@nodelib/fs.walk': 1.2.8
glob-parent: 5.1.2
merge2: 1.4.1
micromatch: 4.0.4
dev: true
/fast-json-stable-stringify/2.1.0:
resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==}
dev: true
/fast-levenshtein/2.0.6:
resolution: {integrity: sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=}
dev: true
/fastparse/1.1.2:
resolution: {integrity: sha512-483XLLxTVIwWK3QTrMGRqUfUpoOs/0hbQrl2oz4J0pAcm3A3bu84wxTFqGqkJzewCLdME38xJLJAxBABfQT8sQ==}
dev: true
/fastq/1.13.0:
resolution: {integrity: sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==}
dependencies:
reusify: 1.0.4
dev: true
/file-entry-cache/6.0.1:
resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==}
engines: {node: ^10.12.0 || >=12.0.0}
dependencies:
flat-cache: 3.0.4
dev: true
/fill-range/7.0.1:
resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==}
engines: {node: '>=8'}
dependencies:
to-regex-range: 5.0.1
dev: true
/flat-cache/3.0.4:
resolution: {integrity: sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==}
engines: {node: ^10.12.0 || >=12.0.0}
dependencies:
flatted: 3.2.4
rimraf: 3.0.2
dev: true
/flatted/3.2.4:
resolution: {integrity: sha512-8/sOawo8tJ4QOBX8YlQBMxL8+RLZfxMQOif9o0KUKTNTjMYElWPE0r/m5VNFxTRd0NSw8qSy8dajrwX4RYI1Hw==}
dev: true
/follow-redirects/1.14.6:
resolution: {integrity: sha512-fhUl5EwSJbbl8AR+uYL2KQDxLkdSjZGR36xy46AO7cOMTrCMON6Sa28FmAnC2tRTDbd/Uuzz3aJBv7EBN7JH8A==}
engines: {node: '>=4.0'}
peerDependencies:
debug: '*'
peerDependenciesMeta:
debug:
optional: true
dev: false
/fs-extra/10.0.0:
resolution: {integrity: sha512-C5owb14u9eJwizKGdchcDUQeFtlSHHthBk8pbX9Vc1PFZrLombudjDnNns88aYslCyF6IY5SUw3Roz6xShcEIQ==}
engines: {node: '>=12'}
dependencies:
graceful-fs: 4.2.9
jsonfile: 6.1.0
universalify: 2.0.0
dev: true
/fs.realpath/1.0.0:
resolution: {integrity: sha1-FQStJSMVjKpA20onh8sBQRmU6k8=}
dev: true
/fsevents/2.3.2:
resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==}
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
os: [darwin]
requiresBuild: true
dev: true
optional: true
/function-bind/1.1.1:
resolution: {integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==}
/functional-red-black-tree/1.0.1:
resolution: {integrity: sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=}
dev: true
/generic-names/1.0.3:
resolution: {integrity: sha1-LXhqEhruUIh2eWk56OO/+DbCCRc=}
dependencies:
loader-utils: 0.2.17
dev: true
/gensync/1.0.0-beta.2:
resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==}
engines: {node: '>=6.9.0'}
dev: true
/get-intrinsic/1.1.1:
resolution: {integrity: sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==}
dependencies:
function-bind: 1.1.1
has: 1.0.3
has-symbols: 1.0.2
/glob-parent/5.1.2:
resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
engines: {node: '>= 6'}
dependencies:
is-glob: 4.0.3
dev: true
/glob-parent/6.0.2:
resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==}
engines: {node: '>=10.13.0'}
dependencies:
is-glob: 4.0.3
dev: true
/glob/7.2.0:
resolution: {integrity: sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==}
dependencies:
fs.realpath: 1.0.0
inflight: 1.0.6
inherits: 2.0.4
minimatch: 3.0.4
once: 1.4.0
path-is-absolute: 1.0.1
dev: true
/globals/11.12.0:
resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==}
engines: {node: '>=4'}
dev: true
/globals/13.12.0:
resolution: {integrity: sha512-uS8X6lSKN2JumVoXrbUz+uG4BYG+eiawqm3qFcT7ammfbUHeCBoJMlHcec/S3krSk73/AE/f0szYFmgAA3kYZg==}
engines: {node: '>=8'}
dependencies:
type-fest: 0.20.2
dev: true
/globby/11.1.0:
resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==}
engines: {node: '>=10'}
dependencies:
array-union: 2.1.0
dir-glob: 3.0.1
fast-glob: 3.2.9
ignore: 5.2.0
merge2: 1.4.1
slash: 3.0.0
dev: true
/graceful-fs/4.2.9:
resolution: {integrity: sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==}
dev: true
/has-flag/3.0.0:
resolution: {integrity: sha1-tdRU3CGZriJWmfNGfloH87lVuv0=}
engines: {node: '>=4'}
dev: true
/has-flag/4.0.0:
resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==}
engines: {node: '>=8'}
/has-symbols/1.0.2:
resolution: {integrity: sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==}
engines: {node: '>= 0.4'}
/has-tostringtag/1.0.0:
resolution: {integrity: sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==}
engines: {node: '>= 0.4'}
dependencies:
has-symbols: 1.0.2
dev: true
/has/1.0.3:
resolution: {integrity: sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==}
engines: {node: '>= 0.4.0'}
dependencies:
function-bind: 1.1.1
/hash-sum/2.0.0:
resolution: {integrity: sha512-WdZTbAByD+pHfl/g9QSsBIIwy8IT+EsPiKDs0KNX+zSHhdDLFKdZu0BQHljvO+0QI/BasbMSUa8wYNCZTvhslg==}
dev: true
/highlight.js/11.4.0:
resolution: {integrity: sha512-nawlpCBCSASs7EdvZOYOYVkJpGmAOKMYZgZtUqSRqodZE0GRVcFKwo1RcpeOemqh9hyttTdd5wDBwHkuSyUfnA==}
engines: {node: '>=12.0.0'}
dev: false
/html-tags/3.1.0:
resolution: {integrity: sha512-1qYz89hW3lFDEazhjW0yVAV87lw8lVkrJocr72XmBkMKsoSVJCQx3W8BXsC7hO2qAt8BoVjYjtAcZ9perqGnNg==}
engines: {node: '>=8'}
dev: true
/htmlparser2/7.2.0:
resolution: {integrity: sha512-H7MImA4MS6cw7nbyURtLPO1Tms7C5H602LRETv95z1MxO/7CP7rDVROehUYeYBUYEON94NXXDEPmZuq+hX4sog==}
dependencies:
domelementtype: 2.2.0
domhandler: 4.3.0
domutils: 2.8.0
entities: 3.0.1
dev: true
/iconv-lite/0.4.24:
resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==}
engines: {node: '>=0.10.0'}
dependencies:
safer-buffer: 2.1.2
dev: true
optional: true
/icss-utils/3.0.1:
resolution: {integrity: sha1-7nDTroysOMa+XtkehRsn7tNDrQ8=}
dependencies:
postcss: 6.0.23
dev: true
/icss-utils/5.1.0_postcss@8.4.5:
resolution: {integrity: sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA==}
engines: {node: ^10 || ^12 || >= 14}
peerDependencies:
postcss: ^8.1.0
dependencies:
postcss: 8.4.5
dev: true
/ignore/4.0.6:
resolution: {integrity: sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==}
engines: {node: '>= 4'}
dev: true
/ignore/5.2.0:
resolution: {integrity: sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==}
engines: {node: '>= 4'}
dev: true
/image-size/0.5.5:
resolution: {integrity: sha1-Cd/Uq50g4p6xw+gLiZA3jfnjy5w=}
engines: {node: '>=0.10.0'}
hasBin: true
requiresBuild: true
dev: true
optional: true
/immutable/4.0.0:
resolution: {integrity: sha512-zIE9hX70qew5qTUjSS7wi1iwj/l7+m54KWU247nhM3v806UdGj1yDndXj+IOYxxtW9zyLI+xqFNZjTuDaLUqFw==}
dev: true
/import-fresh/3.3.0:
resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==}
engines: {node: '>=6'}
dependencies:
parent-module: 1.0.1
resolve-from: 4.0.0
dev: true
/imurmurhash/0.1.4:
resolution: {integrity: sha1-khi5srkoojixPcT7a21XbyMUU+o=}
engines: {node: '>=0.8.19'}
dev: true
/inflight/1.0.6:
resolution: {integrity: sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=}
dependencies:
once: 1.4.0
wrappy: 1.0.2
dev: true
/inherits/2.0.4:
resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==}
dev: true
/is-binary-path/2.1.0:
resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==}
engines: {node: '>=8'}
dependencies:
binary-extensions: 2.2.0
dev: true
/is-core-module/2.8.1:
resolution: {integrity: sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA==}
dependencies:
has: 1.0.3
dev: true
/is-expression/4.0.0:
resolution: {integrity: sha512-zMIXX63sxzG3XrkHkrAPvm/OVZVSCPNkwMHU8oTX7/U3AL78I0QXCEICXUM13BIa8TYGZ68PiTKfQz3yaTNr4A==}
dependencies:
acorn: 7.4.1
object-assign: 4.1.1
dev: true
/is-extglob/2.1.1:
resolution: {integrity: sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=}
engines: {node: '>=0.10.0'}
dev: true
/is-glob/4.0.3:
resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
engines: {node: '>=0.10.0'}
dependencies:
is-extglob: 2.1.1
dev: true
/is-number/7.0.0:
resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
engines: {node: '>=0.12.0'}
dev: true
/is-promise/2.2.2:
resolution: {integrity: sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ==}
dev: true
/is-regex/1.1.4:
resolution: {integrity: sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==}
engines: {node: '>= 0.4'}
dependencies:
call-bind: 1.0.2
has-tostringtag: 1.0.0
dev: true
/is-what/3.14.1:
resolution: {integrity: sha512-sNxgpk9793nzSs7bA6JQJGeIuRBQhAaNGG77kzYQgMkrID+lS6SlK07K5LaptscDlSaIgH+GPFzf+d75FVxozA==}
dev: true
/isexe/2.0.0:
resolution: {integrity: sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=}
dev: true
/jest-diff/27.4.6:
resolution: {integrity: sha512-zjaB0sh0Lb13VyPsd92V7HkqF6yKRH9vm33rwBt7rPYrpQvS1nCvlIy2pICbKta+ZjWngYLNn4cCK4nyZkjS/w==}
engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0}
dependencies:
chalk: 4.1.2
diff-sequences: 27.4.0
jest-get-type: 27.4.0
pretty-format: 27.4.6
dev: false
/jest-get-type/27.4.0:
resolution: {integrity: sha512-tk9o+ld5TWq41DkK14L4wox4s2D9MtTpKaAVzXfr5CUKm5ZK2ExcaFE0qls2W71zE/6R2TxxrK9w2r6svAFDBQ==}
engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0}
dev: false
/js-stringify/1.0.2:
resolution: {integrity: sha1-Fzb939lyTyijaCrcYjCufk6Weds=}
dev: true
/js-tokens/4.0.0:
resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==}
dev: true
/js-yaml/4.1.0:
resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==}
hasBin: true
dependencies:
argparse: 2.0.1
dev: true
/jsesc/2.5.2:
resolution: {integrity: sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==}
engines: {node: '>=4'}
hasBin: true
dev: true
/json-schema-traverse/0.4.1:
resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==}
dev: true
/json-stable-stringify-without-jsonify/1.0.1:
resolution: {integrity: sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=}
dev: true
/json5/0.5.1:
resolution: {integrity: sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE=}
hasBin: true
dev: true
/json5/1.0.1:
resolution: {integrity: sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==}
hasBin: true
dependencies:
minimist: 1.2.5
dev: true
/json5/2.2.0:
resolution: {integrity: sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA==}
engines: {node: '>=6'}
hasBin: true
dependencies:
minimist: 1.2.5
dev: true
/jsonc-parser/2.3.1:
resolution: {integrity: sha512-H8jvkz1O50L3dMZCsLqiuB2tA7muqbSg1AtGEkN0leAqGjsUzDJir3Zwr02BhqdcITPg3ei3mZ+HjMocAknhhg==}
dev: true
/jsonc-parser/3.0.0:
resolution: {integrity: sha512-fQzRfAbIBnR0IQvftw9FJveWiHp72Fg20giDrHz6TdfB12UH/uue0D3hm57UB5KgAVuniLMCaS8P1IMj9NR7cA==}
dev: true
/jsonfile/6.1.0:
resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==}
dependencies:
universalify: 2.0.0
optionalDependencies:
graceful-fs: 4.2.9
dev: true
/jstransformer/1.0.0:
resolution: {integrity: sha1-7Yvwkh4vPx7U1cGkT2hwntJHIsM=}
dependencies:
is-promise: 2.2.2
promise: 7.3.1
dev: true
/klona/2.0.5:
resolution: {integrity: sha512-pJiBpiXMbt7dkzXe8Ghj/u4FfXOOa98fPW+bihOJ4SjnoijweJrNThJfd3ifXpXhREjpoF2mZVH1GfS9LV3kHQ==}
engines: {node: '>= 8'}
dev: true
/less/4.1.2:
resolution: {integrity: sha512-EoQp/Et7OSOVu0aJknJOtlXZsnr8XE8KwuzTHOLeVSEx8pVWUICc8Q0VYRHgzyjX78nMEyC/oztWFbgyhtNfDA==}
engines: {node: '>=6'}
hasBin: true
dependencies:
copy-anything: 2.0.3
parse-node-version: 1.0.1
tslib: 2.3.1
optionalDependencies:
errno: 0.1.8
graceful-fs: 4.2.9
image-size: 0.5.5
make-dir: 2.1.0
mime: 1.6.0
needle: 2.9.1
source-map: 0.6.1
dev: true
/levn/0.4.1:
resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==}
engines: {node: '>= 0.8.0'}
dependencies:
prelude-ls: 1.2.1
type-check: 0.4.0
dev: true
/lilconfig/2.0.4:
resolution: {integrity: sha512-bfTIN7lEsiooCocSISTWXkiWJkRqtL9wYtYy+8EK3Y41qh3mpwPU0ycTOgjdY9ErwXCc8QyrQp82bdL0Xkm9yA==}
engines: {node: '>=10'}
dev: true
/loader-utils/0.2.17:
resolution: {integrity: sha1-+G5jdNQyBabmxg6RlvF8Apm/s0g=}
dependencies:
big.js: 3.2.0
emojis-list: 2.1.0
json5: 0.5.1
object-assign: 4.1.1
dev: true
/lodash-es/4.17.21:
resolution: {integrity: sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==}
dev: false
/lodash.camelcase/4.3.0:
resolution: {integrity: sha1-soqmKIorn8ZRA1x3EfZathkDMaY=}
dev: true
/lodash.merge/4.6.2:
resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==}
dev: true
/lodash/4.17.21:
resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==}
/lru-cache/6.0.0:
resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==}
engines: {node: '>=10'}
dependencies:
yallist: 4.0.0
dev: true
/magic-string/0.25.7:
resolution: {integrity: sha512-4CrMT5DOHTDk4HYDlzmwu4FVCcIYI8gauveasrdCu2IKIFOJ3f0v/8MDGJCDL9oD2ppz/Av1b0Nj345H9M+XIA==}
dependencies:
sourcemap-codec: 1.4.8
dev: false
/make-dir/2.1.0:
resolution: {integrity: sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==}
engines: {node: '>=6'}
requiresBuild: true
dependencies:
pify: 4.0.1
semver: 5.7.1
dev: true
optional: true
/merge2/1.4.1:
resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==}
engines: {node: '>= 8'}
dev: true
/micromatch/4.0.4:
resolution: {integrity: sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==}
engines: {node: '>=8.6'}
dependencies:
braces: 3.0.2
picomatch: 2.3.1
dev: true
/mime/1.6.0:
resolution: {integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==}
engines: {node: '>=4'}
hasBin: true
requiresBuild: true
dev: true
optional: true
/minimatch/3.0.4:
resolution: {integrity: sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==}
dependencies:
brace-expansion: 1.1.11
dev: true
/minimist/1.2.5:
resolution: {integrity: sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==}
dev: true
/mkdirp/1.0.4:
resolution: {integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==}
engines: {node: '>=10'}
hasBin: true
dev: true
/monaco-editor/0.31.1:
resolution: {integrity: sha512-FYPwxGZAeP6mRRyrr5XTGHD9gRXVjy7GUzF4IPChnyt3fS5WrNxIkS8DNujWf6EQy0Zlzpxw8oTVE+mWI2/D1Q==}
dev: false
/ms/2.0.0:
resolution: {integrity: sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=}
dev: true
/ms/2.1.2:
resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==}
dev: true
/ms/2.1.3:
resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
dev: true
optional: true
/naive-ui/2.23.2_vue@3.2.26:
resolution: {integrity: sha512-x5XtoJrfMcBGqkryJ6RPg3hB7xDqbO+5TfDB1UGIaVsj2fsidn9S9a6yK/jXqKbw+h5h97NqOu+Lf5EH8tAIog==}
peerDependencies:
vue: ^3.0.0
dependencies:
'@css-render/plugin-bem': 0.15.8_css-render@0.15.8
'@css-render/vue3-ssr': 0.15.8_vue@3.2.26
'@types/lodash': 4.14.178
'@types/lodash-es': 4.17.5
async-validator: 4.0.7
css-render: 0.15.8
date-fns: 2.28.0
date-fns-tz: 1.2.2_date-fns@2.28.0
evtd: 0.2.3
highlight.js: 11.4.0
lodash: 4.17.21
lodash-es: 4.17.21
seemly: 0.3.3
treemate: 0.3.10
vdirs: 0.1.7_vue@3.2.26
vfonts: 0.1.0
vooks: 0.2.12_vue@3.2.26
vue: 3.2.26
vueuc: 0.4.19_7fea039e05dc0d0bb67319189a00d1ad
dev: false
/nanoid/3.1.30:
resolution: {integrity: sha512-zJpuPDwOv8D2zq2WRoMe1HsfZthVewpel9CAvTfc/2mBD1uUT/agc5f7GHGWXlYkFvi1mVxe4IjvP2HNrop7nQ==}
engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1}
hasBin: true
/natural-compare/1.4.0:
resolution: {integrity: sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=}
dev: true
/needle/2.9.1:
resolution: {integrity: sha512-6R9fqJ5Zcmf+uYaFgdIHmLwNldn5HbK8L5ybn7Uz+ylX/rnOsSp1AHcvQSrCaFN+qNM1wpymHqD7mVasEOlHGQ==}
engines: {node: '>= 4.4.x'}
hasBin: true
requiresBuild: true
dependencies:
debug: 3.2.7
iconv-lite: 0.4.24
sax: 1.2.4
dev: true
optional: true
/neo-async/2.6.2:
resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==}
dev: true
/node-releases/2.0.1:
resolution: {integrity: sha512-CqyzN6z7Q6aMeF/ktcMVTzhAHCEpf8SOarwpzpf8pNBY2k5/oM34UHldUwp8VKI7uxct2HxSRdJjBaZeESzcxA==}
dev: true
/normalize-path/3.0.0:
resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==}
engines: {node: '>=0.10.0'}
dev: true
/nprogress/0.2.0:
resolution: {integrity: sha1-y480xTIT2JVyP8urkH6UIq28r7E=}
dev: false
/object-assign/4.1.1:
resolution: {integrity: sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=}
engines: {node: '>=0.10.0'}
dev: true
/object-inspect/1.12.0:
resolution: {integrity: sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==}
dev: false
/once/1.4.0:
resolution: {integrity: sha1-WDsap3WWHUsROsF9nFC6753Xa9E=}
dependencies:
wrappy: 1.0.2
dev: true
/optionator/0.9.1:
resolution: {integrity: sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==}
engines: {node: '>= 0.8.0'}
dependencies:
deep-is: 0.1.4
fast-levenshtein: 2.0.6
levn: 0.4.1
prelude-ls: 1.2.1
type-check: 0.4.0
word-wrap: 1.2.3
dev: true
/parent-module/1.0.1:
resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==}
engines: {node: '>=6'}
dependencies:
callsites: 3.1.0
dev: true
/parse-node-version/1.0.1:
resolution: {integrity: sha512-3YHlOa/JgH6Mnpr05jP9eDG254US9ek25LyIxZlDItp2iJtwyaXQb57lBYLdT3MowkUFYEV2XXNAYIPlESvJlA==}
engines: {node: '>= 0.10'}
dev: true
/path-is-absolute/1.0.1:
resolution: {integrity: sha1-F0uSaHNVNP+8es5r9TpanhtcX18=}
engines: {node: '>=0.10.0'}
dev: true
/path-key/3.1.1:
resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==}
engines: {node: '>=8'}
dev: true
/path-parse/1.0.7:
resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==}
dev: true
/path-type/4.0.0:
resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==}
engines: {node: '>=8'}
dev: true
/picocolors/1.0.0:
resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==}
/picomatch/2.3.1:
resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
engines: {node: '>=8.6'}
dev: true
/pify/4.0.1:
resolution: {integrity: sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==}
engines: {node: '>=6'}
dev: true
optional: true
/pinia-plugin-persistedstate/1.0.3_pinia@2.0.9:
resolution: {integrity: sha512-f+mE3VpnrKcYUZ6zNTpICsZ+tPo/wxXS6i6ECJKV9fx2Ei5hEMrC6dxUrAWK9hKR4Y/MTt3XAWAHiM3lJtP3Jw==}
peerDependencies:
pinia: ^2.0.0
dependencies:
pinia: 2.0.9_typescript@4.5.4+vue@3.2.26
shvl: 2.0.3
dev: false
/pinia/2.0.9_typescript@4.5.4+vue@3.2.26:
resolution: {integrity: sha512-iuYdxLJKQ07YPyOHYH05wNG9eKWqkP/4y4GE8+RqEYtz5fwHgPA5kr6zQbg/DoEJGnR2XCm1w1vdt6ppzL9ATg==}
peerDependencies:
'@vue/composition-api': ^1.4.0
typescript: '>=4.4.4'
vue: ^2.6.14 || ^3.2.0
peerDependenciesMeta:
'@vue/composition-api':
optional: true
typescript:
optional: true
dependencies:
'@vue/devtools-api': 6.0.0-beta.21.1
typescript: 4.5.4
vue: 3.2.26
vue-demi: 0.12.1_vue@3.2.26
dev: false
/postcss-filter-plugins/3.0.1:
resolution: {integrity: sha512-tRKbW4wWBEkSSFuJtamV2wkiV9rj6Yy7P3Y13+zaynlPEEZt8EgYKn3y/RBpMeIhNmHXFlSdzofml65hD5OafA==}
dependencies:
postcss: 6.0.23
dev: true
/postcss-icss-keyframes/0.2.1:
resolution: {integrity: sha1-gMRFXgESsPL5w8Bax1FQYruf8pU=}
dependencies:
icss-utils: 3.0.1
postcss: 6.0.23
postcss-value-parser: 3.3.1
dev: true
/postcss-icss-selectors/2.0.3:
resolution: {integrity: sha1-J/oa/Kq2xgLIZsuymPMhjpvBybM=}
dependencies:
css-selector-tokenizer: 0.7.3
generic-names: 1.0.3
icss-utils: 3.0.1
lodash: 4.17.21
postcss: 6.0.23
dev: true
/postcss-load-config/3.1.1:
resolution: {integrity: sha512-c/9XYboIbSEUZpiD1UQD0IKiUe8n9WHYV7YFe7X7J+ZwCsEKkUJSFWjS9hBU1RR9THR7jMXst8sxiqP0jjo2mg==}
engines: {node: '>= 10'}
peerDependencies:
ts-node: '>=9.0.0'
peerDependenciesMeta:
ts-node:
optional: true
dependencies:
lilconfig: 2.0.4
yaml: 1.10.2
dev: true
/postcss-value-parser/3.3.1:
resolution: {integrity: sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==}
dev: true
/postcss/6.0.23:
resolution: {integrity: sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag==}
engines: {node: '>=4.0.0'}
dependencies:
chalk: 2.4.2
source-map: 0.6.1
supports-color: 5.5.0
dev: true
/postcss/8.4.5:
resolution: {integrity: sha512-jBDboWM8qpaqwkMwItqTQTiFikhs/67OYVvblFFTM7MrZjt6yMKd6r2kgXizEbTTljacm4NldIlZnhbjr84QYg==}
engines: {node: ^10 || ^12 || >=14}
dependencies:
nanoid: 3.1.30
picocolors: 1.0.0
source-map-js: 1.0.1
/prelude-ls/1.2.1:
resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==}
engines: {node: '>= 0.8.0'}
dev: true
/prettier-linter-helpers/1.0.0:
resolution: {integrity: sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==}
engines: {node: '>=6.0.0'}
dependencies:
fast-diff: 1.2.0
dev: true
/prettier/2.5.1:
resolution: {integrity: sha512-vBZcPRUR5MZJwoyi3ZoyQlc1rXeEck8KgeC9AwwOn+exuxLxq5toTRDTSaVrXHxelDMHy9zlicw8u66yxoSUFg==}
engines: {node: '>=10.13.0'}
hasBin: true
dev: true
/pretty-format/27.4.6:
resolution: {integrity: sha512-NblstegA1y/RJW2VyML+3LlpFjzx62cUrtBIKIWDXEDkjNeleA7Od7nrzcs/VLQvAeV4CgSYhrN39DRN88Qi/g==}
engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0}
dependencies:
ansi-regex: 5.0.1
ansi-styles: 5.2.0
react-is: 17.0.2
dev: false
/progress/2.0.3:
resolution: {integrity: sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==}
engines: {node: '>=0.4.0'}
dev: true
/promise/7.3.1:
resolution: {integrity: sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==}
dependencies:
asap: 2.0.6
dev: true
/prr/1.0.1:
resolution: {integrity: sha1-0/wRS6BplaRexok/SEzrHXj19HY=}
dev: true
optional: true
/pug-attrs/3.0.0:
resolution: {integrity: sha512-azINV9dUtzPMFQktvTXciNAfAuVh/L/JCl0vtPCwvOA21uZrC08K/UnmrL+SXGEVc1FwzjW62+xw5S/uaLj6cA==}
dependencies:
constantinople: 4.0.1
js-stringify: 1.0.2
pug-runtime: 3.0.1
dev: true
/pug-code-gen/3.0.2:
resolution: {integrity: sha512-nJMhW16MbiGRiyR4miDTQMRWDgKplnHyeLvioEJYbk1RsPI3FuA3saEP8uwnTb2nTJEKBU90NFVWJBk4OU5qyg==}
dependencies:
constantinople: 4.0.1
doctypes: 1.1.0
js-stringify: 1.0.2
pug-attrs: 3.0.0
pug-error: 2.0.0
pug-runtime: 3.0.1
void-elements: 3.1.0
with: 7.0.2
dev: true
/pug-error/2.0.0:
resolution: {integrity: sha512-sjiUsi9M4RAGHktC1drQfCr5C5eriu24Lfbt4s+7SykztEOwVZtbFk1RRq0tzLxcMxMYTBR+zMQaG07J/btayQ==}
dev: true
/pug-filters/4.0.0:
resolution: {integrity: sha512-yeNFtq5Yxmfz0f9z2rMXGw/8/4i1cCFecw/Q7+D0V2DdtII5UvqE12VaZ2AY7ri6o5RNXiweGH79OCq+2RQU4A==}
dependencies:
constantinople: 4.0.1
jstransformer: 1.0.0
pug-error: 2.0.0
pug-walk: 2.0.0
resolve: 1.21.0
dev: true
/pug-lexer/5.0.1:
resolution: {integrity: sha512-0I6C62+keXlZPZkOJeVam9aBLVP2EnbeDw3An+k0/QlqdwH6rv8284nko14Na7c0TtqtogfWXcRoFE4O4Ff20w==}
dependencies:
character-parser: 2.2.0
is-expression: 4.0.0
pug-error: 2.0.0
dev: true
/pug-linker/4.0.0:
resolution: {integrity: sha512-gjD1yzp0yxbQqnzBAdlhbgoJL5qIFJw78juN1NpTLt/mfPJ5VgC4BvkoD3G23qKzJtIIXBbcCt6FioLSFLOHdw==}
dependencies:
pug-error: 2.0.0
pug-walk: 2.0.0
dev: true
/pug-load/3.0.0:
resolution: {integrity: sha512-OCjTEnhLWZBvS4zni/WUMjH2YSUosnsmjGBB1An7CsKQarYSWQ0GCVyd4eQPMFJqZ8w9xgs01QdiZXKVjk92EQ==}
dependencies:
object-assign: 4.1.1
pug-walk: 2.0.0
dev: true
/pug-parser/6.0.0:
resolution: {integrity: sha512-ukiYM/9cH6Cml+AOl5kETtM9NR3WulyVP2y4HOU45DyMim1IeP/OOiyEWRr6qk5I5klpsBnbuHpwKmTx6WURnw==}
dependencies:
pug-error: 2.0.0
token-stream: 1.0.0
dev: true
/pug-runtime/3.0.1:
resolution: {integrity: sha512-L50zbvrQ35TkpHwv0G6aLSuueDRwc/97XdY8kL3tOT0FmhgG7UypU3VztfV/LATAvmUfYi4wNxSajhSAeNN+Kg==}
dev: true
/pug-strip-comments/2.0.0:
resolution: {integrity: sha512-zo8DsDpH7eTkPHCXFeAk1xZXJbyoTfdPlNR0bK7rpOMuhBYb0f5qUVCO1xlsitYd3w5FQTK7zpNVKb3rZoUrrQ==}
dependencies:
pug-error: 2.0.0
dev: true
/pug-walk/2.0.0:
resolution: {integrity: sha512-yYELe9Q5q9IQhuvqsZNwA5hfPkMJ8u92bQLIMcsMxf/VADjNtEYptU+inlufAFYcWdHlwNfZOEnOOQrZrcyJCQ==}
dev: true
/pug/3.0.2:
resolution: {integrity: sha512-bp0I/hiK1D1vChHh6EfDxtndHji55XP/ZJKwsRqrz6lRia6ZC2OZbdAymlxdVFwd1L70ebrVJw4/eZ79skrIaw==}
dependencies:
pug-code-gen: 3.0.2
pug-filters: 4.0.0
pug-lexer: 5.0.1
pug-linker: 4.0.0
pug-load: 3.0.0
pug-parser: 6.0.0
pug-runtime: 3.0.1
pug-strip-comments: 2.0.0
dev: true
/punycode/2.1.1:
resolution: {integrity: sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==}
engines: {node: '>=6'}
dev: true
/qs/6.10.2:
resolution: {integrity: sha512-mSIdjzqznWgfd4pMii7sHtaYF8rx8861hBO80SraY5GT0XQibWZWJSid0avzHGkDIZLImux2S5mXO0Hfct2QCw==}
engines: {node: '>=0.6'}
dependencies:
side-channel: 1.0.4
dev: false
/queue-microtask/1.2.3:
resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==}
dev: true
/react-is/17.0.2:
resolution: {integrity: sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==}
dev: false
/readdirp/3.6.0:
resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==}
engines: {node: '>=8.10.0'}
dependencies:
picomatch: 2.3.1
dev: true
/regexpp/3.2.0:
resolution: {integrity: sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==}
engines: {node: '>=8'}
dev: true
/request-light/0.5.7:
resolution: {integrity: sha512-i/wKzvcx7Er8tZnvqSxWuNO5ZGggu2UgZAqj/RyZ0si7lBTXL7kZiI/dWxzxnQjaY7s5HEy1qK21Do4Ncr6cVw==}
dev: true
/reserved-words/0.1.2:
resolution: {integrity: sha1-AKCUD5jNUBrqqsMWQR2a3FKzGrE=}
dev: true
/resize-observer-polyfill/1.5.1:
resolution: {integrity: sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg==}
dev: false
/resolve-from/4.0.0:
resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==}
engines: {node: '>=4'}
dev: true
/resolve-url/0.2.1:
resolution: {integrity: sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=}
deprecated: https://github.com/lydell/resolve-url#deprecated
dev: true
/resolve/1.21.0:
resolution: {integrity: sha512-3wCbTpk5WJlyE4mSOtDLhqQmGFi0/TD9VPwmiolnk8U0wRgMEktqCXd3vy5buTO3tljvalNvKrjHEfrd2WpEKA==}
hasBin: true
dependencies:
is-core-module: 2.8.1
path-parse: 1.0.7
supports-preserve-symlinks-flag: 1.0.0
dev: true
/reusify/1.0.4:
resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==}
engines: {iojs: '>=1.0.0', node: '>=0.10.0'}
dev: true
/rimraf/3.0.2:
resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==}
hasBin: true
dependencies:
glob: 7.2.0
dev: true
/rollup/2.63.0:
resolution: {integrity: sha512-nps0idjmD+NXl6OREfyYXMn/dar3WGcyKn+KBzPdaLecub3x/LrId0wUcthcr8oZUAcZAR8NKcfGGFlNgGL1kQ==}
engines: {node: '>=10.0.0'}
hasBin: true
optionalDependencies:
fsevents: 2.3.2
dev: true
/run-parallel/1.2.0:
resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==}
dependencies:
queue-microtask: 1.2.3
dev: true
/safe-buffer/5.1.2:
resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==}
dev: true
/safer-buffer/2.1.2:
resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==}
dev: true
/sass-loader/12.4.0_sass@1.47.0:
resolution: {integrity: sha512-7xN+8khDIzym1oL9XyS6zP6Ges+Bo2B2xbPrjdMHEYyV3AQYhd/wXeru++3ODHF0zMjYmVadblSKrPrjEkL8mg==}
engines: {node: '>= 12.13.0'}
peerDependencies:
fibers: '>= 3.1.0'
node-sass: ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0
sass: ^1.3.0
webpack: ^5.0.0
peerDependenciesMeta:
fibers:
optional: true
node-sass:
optional: true
sass:
optional: true
dependencies:
klona: 2.0.5
neo-async: 2.6.2
sass: 1.47.0
dev: true
/sass/1.47.0:
resolution: {integrity: sha512-GtXwvwgD7/6MLUZPnlA5/8cdRgC9SzT5kAnnJMRmEZQFRE3J56Foswig4NyyyQGsnmNvg6EUM/FP0Pe9Y2zywQ==}
engines: {node: '>=8.9.0'}
hasBin: true
dependencies:
chokidar: 3.5.2
immutable: 4.0.0
source-map-js: 1.0.1
dev: true
/sax/1.2.4:
resolution: {integrity: sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==}
dev: true
/seemly/0.3.3:
resolution: {integrity: sha512-mAyqemz41e9HiZPMXAn7NtTExJgztwco5cdZjrt/iViU/oFeav+Q8K1c93M/tIZZ00QkT65JMr4xXQk7Vv5hWQ==}
dependencies:
'@types/jest': 27.4.0
dev: false
/semver/5.7.1:
resolution: {integrity: sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==}
hasBin: true
dev: true
optional: true
/semver/6.3.0:
resolution: {integrity: sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==}
hasBin: true
dev: true
/semver/7.3.5:
resolution: {integrity: sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==}
engines: {node: '>=10'}
hasBin: true
dependencies:
lru-cache: 6.0.0
dev: true
/shebang-command/2.0.0:
resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==}
engines: {node: '>=8'}
dependencies:
shebang-regex: 3.0.0
dev: true
/shebang-regex/3.0.0:
resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==}
engines: {node: '>=8'}
dev: true
/shvl/2.0.3:
resolution: {integrity: sha512-V7C6S9Hlol6SzOJPnQ7qzOVEWUQImt3BNmmzh40wObhla3XOYMe4gGiYzLrJd5TFa+cI2f9LKIRJTTKZSTbWgw==}
dev: false
/side-channel/1.0.4:
resolution: {integrity: sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==}
dependencies:
call-bind: 1.0.2
get-intrinsic: 1.1.1
object-inspect: 1.12.0
dev: false
/slash/3.0.0:
resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==}
engines: {node: '>=8'}
dev: true
/source-map-js/1.0.1:
resolution: {integrity: sha512-4+TN2b3tqOCd/kaGRJ/sTYA0tR0mdXx26ipdolxcwtJVqEnqNYvlCAt1q3ypy4QMlYus+Zh34RNtYLoq2oQ4IA==}
engines: {node: '>=0.10.0'}
/source-map-resolve/0.5.3:
resolution: {integrity: sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw==}
dependencies:
atob: 2.1.2
decode-uri-component: 0.2.0
resolve-url: 0.2.1
source-map-url: 0.4.1
urix: 0.1.0
dev: true
/source-map-url/0.4.1:
resolution: {integrity: sha512-cPiFOTLUKvJFIg4SKVScy4ilPPW6rFgMgfuZJPNoDuMs3nC1HbMUycBoJw77xFIp6z1UJQJOfx6C9GMH80DiTw==}
dev: true
/source-map/0.5.7:
resolution: {integrity: sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=}
engines: {node: '>=0.10.0'}
dev: true
/source-map/0.6.1:
resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==}
engines: {node: '>=0.10.0'}
/source-map/0.7.3:
resolution: {integrity: sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==}
engines: {node: '>= 8'}
dev: true
/sourcemap-codec/1.4.8:
resolution: {integrity: sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==}
dev: false
/strip-ansi/6.0.1:
resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==}
engines: {node: '>=8'}
dependencies:
ansi-regex: 5.0.1
dev: true
/strip-bom/3.0.0:
resolution: {integrity: sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=}
engines: {node: '>=4'}
dev: true
/strip-json-comments/3.1.1:
resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==}
engines: {node: '>=8'}
dev: true
/stylus/0.54.8:
resolution: {integrity: sha512-vr54Or4BZ7pJafo2mpf0ZcwA74rpuYCZbxrHBsH8kbcXOwSfvBFwsRfpGO5OD5fhG5HDCFW737PKaawI7OqEAg==}
hasBin: true
dependencies:
css-parse: 2.0.0
debug: 3.1.0
glob: 7.2.0
mkdirp: 1.0.4
safer-buffer: 2.1.2
sax: 1.2.4
semver: 6.3.0
source-map: 0.7.3
dev: true
/supports-color/5.5.0:
resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==}
engines: {node: '>=4'}
dependencies:
has-flag: 3.0.0
dev: true
/supports-color/7.2.0:
resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==}
engines: {node: '>=8'}
dependencies:
has-flag: 4.0.0
/supports-preserve-symlinks-flag/1.0.0:
resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==}
engines: {node: '>= 0.4'}
dev: true
/svg-tags/1.0.0:
resolution: {integrity: sha1-WPcc7jvVGbWdSyqEO2x95krAR2Q=}
dev: true
/text-table/0.2.0:
resolution: {integrity: sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=}
dev: true
/to-fast-properties/2.0.0:
resolution: {integrity: sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=}
engines: {node: '>=4'}
dev: true
/to-regex-range/5.0.1:
resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
engines: {node: '>=8.0'}
dependencies:
is-number: 7.0.0
dev: true
/token-stream/1.0.0:
resolution: {integrity: sha1-zCAOqyYT9BZtJ/+a/HylbUnfbrQ=}
dev: true
/treemate/0.3.10:
resolution: {integrity: sha512-uxN98H84/zXAkn02rEeip9SNnOR889QIBXfpZnwjaanp8JRElxjdmDaKmVULclrn54J8RcJVCqfeQZsdeMjwow==}
dev: false
/tsconfig-paths/3.12.0:
resolution: {integrity: sha512-e5adrnOYT6zqVnWqZu7i/BQ3BnhzvGbjEjejFXO20lKIKpwTaupkCPgEfv4GZK1IBciJUEhYs3J3p75FdaTFVg==}
dependencies:
'@types/json5': 0.0.29
json5: 1.0.1
minimist: 1.2.5
strip-bom: 3.0.0
dev: true
/tslib/1.14.1:
resolution: {integrity: sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==}
dev: true
/tslib/2.3.0:
resolution: {integrity: sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg==}
dev: false
/tslib/2.3.1:
resolution: {integrity: sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==}
dev: true
/tsutils/3.21.0_typescript@4.5.4:
resolution: {integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==}
engines: {node: '>= 6'}
peerDependencies:
typescript: '>=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta'
dependencies:
tslib: 1.14.1
typescript: 4.5.4
dev: true
/type-check/0.4.0:
resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==}
engines: {node: '>= 0.8.0'}
dependencies:
prelude-ls: 1.2.1
dev: true
/type-fest/0.20.2:
resolution: {integrity: sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==}
engines: {node: '>=10'}
dev: true
/typescript-plugin-css-modules/3.4.0_typescript@4.5.4:
resolution: {integrity: sha512-2MdjfSg4MGex1csCWRUwKD+MpgnvcvLLr9bSAMemU/QYGqBsXdez0cc06H/fFhLtRoKJjXg6PSTur3Gy1Umhpw==}
peerDependencies:
typescript: '>=3.0.0'
dependencies:
dotenv: 10.0.0
icss-utils: 5.1.0_postcss@8.4.5
less: 4.1.2
lodash.camelcase: 4.3.0
postcss: 8.4.5
postcss-filter-plugins: 3.0.1
postcss-icss-keyframes: 0.2.1
postcss-icss-selectors: 2.0.3
postcss-load-config: 3.1.1
reserved-words: 0.1.2
sass: 1.47.0
stylus: 0.54.8
tsconfig-paths: 3.12.0
typescript: 4.5.4
transitivePeerDependencies:
- ts-node
dev: true
/typescript/4.5.4:
resolution: {integrity: sha512-VgYs2A2QIRuGphtzFV7aQJduJ2gyfTljngLzjpfW9FoYZF6xuw1W0vW9ghCKLfcWrCFxK81CSGRAvS1pn4fIUg==}
engines: {node: '>=4.2.0'}
hasBin: true
dev: true
/universalify/2.0.0:
resolution: {integrity: sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==}
engines: {node: '>= 10.0.0'}
dev: true
/upath/2.0.1:
resolution: {integrity: sha512-1uEe95xksV1O0CYKXo8vQvN1JEbtJp7lb7C5U9HMsIp6IVwntkH/oNUzyVNQSd4S1sYk2FpSSW44FqMc8qee5w==}
engines: {node: '>=4'}
dev: true
/uri-js/4.4.1:
resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==}
dependencies:
punycode: 2.1.1
dev: true
/urix/0.1.0:
resolution: {integrity: sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI=}
deprecated: Please see https://github.com/lydell/urix#deprecated
dev: true
/v8-compile-cache/2.3.0:
resolution: {integrity: sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==}
dev: true
/vdirs/0.1.7_vue@3.2.26:
resolution: {integrity: sha512-MEUaLhV1jJyUqA2Ar4DfvlQx8jWs+PpCZ2dbM0ILelpMWGOybzt8ddL456VxeIbY/tkuDGT/Wzb8GG4LCuLuHw==}
peerDependencies:
vue: ^3.0.11
dependencies:
evtd: 0.2.3
vue: 3.2.26
dev: false
/vfonts/0.1.0:
resolution: {integrity: sha512-vQBcvntBlnAPonAkGNM8iJ9NxE3PucA+V2W95xiN75YJKxirLJvOws2kEyOEO45T4N+YTbQOCR2m77Y05pfVhQ==}
dev: false
/vite-plugin-compression/0.3.6_vite@2.7.10:
resolution: {integrity: sha512-aSskQCJsP3VQ8PsnY+vO7UfD5qoFMOEuzg0PG2E9Zqyx+ARmc3wr9KCgOFraZOFW1Y4UAa5BR0SMTjoxHRMJoQ==}
peerDependencies:
vite: '>=2.0.0'
dependencies:
chalk: 4.1.2
debug: 4.3.3
fs-extra: 10.0.0
vite: 2.7.10_sass@1.47.0
transitivePeerDependencies:
- supports-color
dev: true
/vite/2.7.10_sass@1.47.0:
resolution: {integrity: sha512-KEY96ntXUid1/xJihJbgmLZx7QSC2D4Tui0FdS0Old5OokYzFclcofhtxtjDdGOk/fFpPbHv9yw88+rB93Tb8w==}
engines: {node: '>=12.2.0'}
hasBin: true
peerDependencies:
less: '*'
sass: '*'
stylus: '*'
peerDependenciesMeta:
less:
optional: true
sass:
optional: true
stylus:
optional: true
dependencies:
esbuild: 0.13.15
postcss: 8.4.5
resolve: 1.21.0
rollup: 2.63.0
sass: 1.47.0
optionalDependencies:
fsevents: 2.3.2
dev: true
/void-elements/3.1.0:
resolution: {integrity: sha1-YU9/v42AHwu18GYfWy9XhXUOTwk=}
engines: {node: '>=0.10.0'}
dev: true
/vooks/0.2.12_vue@3.2.26:
resolution: {integrity: sha512-iox0I3RZzxtKlcgYaStQYKEzWWGAduMmq+jS7OrNdQo1FgGfPMubGL3uGHOU9n97NIvfFDBGnpSvkWyb/NSn/Q==}
peerDependencies:
vue: ^3.0.0
dependencies:
evtd: 0.2.3
vue: 3.2.26
dev: false
/vscode-css-languageservice/5.1.9:
resolution: {integrity: sha512-/tFOWeZBL3Oc9Zc+2MAi3rEwiXJTSZsvjB+M7nSjWLbGPUIjukUA7YzLgsBoUfR35sPJYnXWUkL56PdfIYM8GA==}
dependencies:
vscode-languageserver-textdocument: 1.0.3
vscode-languageserver-types: 3.16.0
vscode-nls: 5.0.0
vscode-uri: 3.0.3
dev: true
/vscode-html-languageservice/4.2.1:
resolution: {integrity: sha512-PgaToZVXJ44nFWEBuSINdDgVV6EnpC3MnXBsysR3O5TKcAfywbYeRGRy+Y4dVR7YeUgDvtb+JkJoSkaYC0mxXQ==}
dependencies:
vscode-languageserver-textdocument: 1.0.3
vscode-languageserver-types: 3.16.0
vscode-nls: 5.0.0
vscode-uri: 3.0.3
dev: true
/vscode-json-languageservice/4.1.10:
resolution: {integrity: sha512-IHliMEEYSY0tJjJt0ECb8ESx/nRXpoy9kN42WVQXgaqGyizFAf3jibSiezDQTrrY7f3kywXggCU+kkJEM+OLZQ==}
dependencies:
jsonc-parser: 3.0.0
vscode-languageserver-textdocument: 1.0.3
vscode-languageserver-types: 3.16.0
vscode-nls: 5.0.0
vscode-uri: 3.0.3
dev: true
/vscode-jsonrpc/8.0.0-next.4:
resolution: {integrity: sha512-i+wvza5Wd0YV/t9qhnS8I+dJdhJ1fHIhRW4f262rXXM9Mgts5VZhYrRZufGcai4y99RlbZvwaZhplQ6diRXkaA==}
engines: {node: '>=8.0.0 || >=10.0.0'}
dev: true
/vscode-languageserver-protocol/3.17.0-next.11:
resolution: {integrity: sha512-9FqHT7XvM6tWFsnLvRfuQA7Zh7wZZYAwA9dK85lYthA8M1aXpXEP9drXVvO/Fe03MUeJpKVf2e4/NvDaFUnttg==}
dependencies:
vscode-jsonrpc: 8.0.0-next.4
vscode-languageserver-types: 3.17.0-next.5
dev: true
/vscode-languageserver-textdocument/1.0.3:
resolution: {integrity: sha512-ynEGytvgTb6HVSUwPJIAZgiHQmPCx8bZ8w5um5Lz+q5DjP0Zj8wTFhQpyg8xaMvefDytw2+HH5yzqS+FhsR28A==}
dev: true
/vscode-languageserver-types/3.16.0:
resolution: {integrity: sha512-k8luDIWJWyenLc5ToFQQMaSrqCHiLwyKPHKPQZ5zz21vM+vIVUSvsRpcbiECH4WR88K2XZqc4ScRcZ7nk/jbeA==}
dev: true
/vscode-languageserver-types/3.17.0-next.5:
resolution: {integrity: sha512-Zcfaw8BznhlJWB09LDR0dscXyxn9+liREqJnPF4pigeUCHwKxYapYqizwuCpMHQ/oLYiAvKwU+f28hPleYu7pA==}
dev: true
/vscode-languageserver/8.0.0-next.5:
resolution: {integrity: sha512-3E2W0eWtGKb6QAJqspOnD0thrBRRo8IGUMV5jpDNMcMKvmtkcxMwsBh0VxdvuWaZ51PiNyR4L+B+GUvkYsyFEg==}
hasBin: true
dependencies:
vscode-languageserver-protocol: 3.17.0-next.11
dev: true
/vscode-nls/5.0.0:
resolution: {integrity: sha512-u0Lw+IYlgbEJFF6/qAqG2d1jQmJl0eyAGJHoAJqr2HT4M2BNuQYSEiSE75f52pXHSJm8AlTjnLLbBFPrdz2hpA==}
dev: true
/vscode-pug-languageservice/0.28.10:
resolution: {integrity: sha512-zhpNmMxltAlid4ZWVq0YrCbD0v2Nk/OsUl2q1pZkSJheGVMj/ZAlcYqDvWjLbMfGPtpvoC6nPxhSCc6sIDN9XA==}
dependencies:
'@volar/code-gen': 0.28.10
'@volar/shared': 0.28.10
'@volar/source-map': 0.28.10
'@volar/transforms': 0.28.10
pug-lexer: 5.0.1
pug-parser: 6.0.0
vscode-languageserver: 8.0.0-next.5
dev: true
/vscode-typescript-languageservice/0.28.10:
resolution: {integrity: sha512-TTJSQss0YR784e0Rr8se5huxd0edqGzO7A51kejEQiPPhIcOlYCEeeFxDtqv3S+/fUUkeFVdRBZA9Ie7Jfrldw==}
dependencies:
'@volar/shared': 0.28.10
semver: 7.3.5
upath: 2.0.1
vscode-languageserver: 8.0.0-next.5
vscode-languageserver-textdocument: 1.0.3
dev: true
/vscode-uri/2.1.2:
resolution: {integrity: sha512-8TEXQxlldWAuIODdukIb+TR5s+9Ds40eSJrw+1iDDA9IFORPjMELarNQE3myz5XIkWWpdprmJjm1/SxMlWOC8A==}
dev: true
/vscode-uri/3.0.3:
resolution: {integrity: sha512-EcswR2S8bpR7fD0YPeS7r2xXExrScVMxg4MedACaWHEtx9ftCF/qHG1xGkolzTPcEmjTavCQgbVzHUIdTMzFGA==}
dev: true
/vscode-vue-languageservice/0.28.10:
resolution: {integrity: sha512-xsA9aEiELiA9zHxzhI58Y6crcSfqxtt3EDKyey9rcNYe/bdY1NY0qLh3SRxdXF8YwoxzRvnn4iUw0oxCjHnFUQ==}
dependencies:
'@volar/code-gen': 0.28.10
'@volar/html2pug': 0.28.10
'@volar/shared': 0.28.10
'@volar/source-map': 0.28.10
'@volar/transforms': 0.28.10
'@vscode/emmet-helper': 2.8.3
'@vue/compiler-dom': 3.2.26
'@vue/reactivity': 3.2.26
'@vue/shared': 3.2.26
request-light: 0.5.7
upath: 2.0.1
vscode-css-languageservice: 5.1.9
vscode-html-languageservice: 4.2.1
vscode-json-languageservice: 4.1.10
vscode-languageserver: 8.0.0-next.5
vscode-languageserver-textdocument: 1.0.3
vscode-pug-languageservice: 0.28.10
vscode-typescript-languageservice: 0.28.10
dev: true
/vue-demi/0.12.1_vue@3.2.26:
resolution: {integrity: sha512-QL3ny+wX8c6Xm1/EZylbgzdoDolye+VpCXRhI2hug9dJTP3OUJ3lmiKN3CsVV3mOJKwFi0nsstbgob0vG7aoIw==}
engines: {node: '>=12'}
hasBin: true
requiresBuild: true
peerDependencies:
'@vue/composition-api': ^1.0.0-rc.1
vue: ^3.0.0-0 || ^2.6.0
peerDependenciesMeta:
'@vue/composition-api':
optional: true
dependencies:
vue: 3.2.26
dev: false
/vue-eslint-parser/8.0.1_eslint@8.6.0:
resolution: {integrity: sha512-lhWjDXJhe3UZw2uu3ztX51SJAPGPey1Tff2RK3TyZURwbuI4vximQLzz4nQfCv8CZq4xx7uIiogHMMoSJPr33A==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
peerDependencies:
eslint: '>=6.0.0'
dependencies:
debug: 4.3.3
eslint: 8.6.0
eslint-scope: 6.0.0
eslint-visitor-keys: 3.1.0
espree: 9.3.0
esquery: 1.4.0
lodash: 4.17.21
semver: 7.3.5
transitivePeerDependencies:
- supports-color
dev: true
/vue-i18n/9.2.0-beta.26_vue@3.2.26:
resolution: {integrity: sha512-xZgisyirT9hXFyXBL8pO8bYgdG7m98NLAUhbb1O9hMQS1qOXYU902+LIJA5k3BtoeiEIfjkGADHhSAlJTTzc9A==}
engines: {node: '>= 12'}
peerDependencies:
vue: ^3.0.0
dependencies:
'@intlify/core-base': 9.2.0-beta.26
'@intlify/shared': 9.2.0-beta.26
'@intlify/vue-devtools': 9.2.0-beta.26
'@vue/devtools-api': 6.0.0-beta.21.1
vue: 3.2.26
dev: false
/vue-router/4.0.12_vue@3.2.26:
resolution: {integrity: sha512-CPXvfqe+mZLB1kBWssssTiWg4EQERyqJZes7USiqfW9B5N2x+nHlnsM1D3b5CaJ6qgCvMmYJnz+G0iWjNCvXrg==}
peerDependencies:
vue: ^3.0.0
dependencies:
'@vue/devtools-api': 6.0.0-beta.21.1
vue: 3.2.26
dev: false
/vue-tsc/0.28.10_typescript@4.5.4:
resolution: {integrity: sha512-tGD7eC74MHqKH2/F66AYkC1zNiLrgnhMzeYWou3p/wApMaUEM4h29HqYoKN6uE+pq87uvq/penYqUSBXhIwLiA==}
hasBin: true
peerDependencies:
typescript: '*'
dependencies:
'@volar/shared': 0.28.10
typescript: 4.5.4
vscode-vue-languageservice: 0.28.10
dev: true
/vue/3.2.26:
resolution: {integrity: sha512-KD4lULmskL5cCsEkfhERVRIOEDrfEL9CwAsLYpzptOGjaGFNWo3BQ9g8MAb7RaIO71rmVOziZ/uEN/rHwcUIhg==}
dependencies:
'@vue/compiler-dom': 3.2.26
'@vue/compiler-sfc': 3.2.26
'@vue/runtime-dom': 3.2.26
'@vue/server-renderer': 3.2.26_vue@3.2.26
'@vue/shared': 3.2.26
dev: false
/vueuc/0.4.19_7fea039e05dc0d0bb67319189a00d1ad:
resolution: {integrity: sha512-mzhYwv60cldElLolvG5sMsDFNMP1+vQbxPnbWzT2BdIG6XfF0hzCxq0KMoYpg4cAekKijty2txRgNkySIjTWLA==}
peerDependencies:
'@css-render/vue3-ssr': ^0.15.7
vue: ^3.0.11
dependencies:
'@css-render/vue3-ssr': 0.15.8_vue@3.2.26
css-render: 0.15.8
evtd: 0.2.3
resize-observer-polyfill: 1.5.1
seemly: 0.3.3
vdirs: 0.1.7_vue@3.2.26
vooks: 0.2.12_vue@3.2.26
vue: 3.2.26
dev: false
/which/2.0.2:
resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==}
engines: {node: '>= 8'}
hasBin: true
dependencies:
isexe: 2.0.0
dev: true
/with/7.0.2:
resolution: {integrity: sha512-RNGKj82nUPg3g5ygxkQl0R937xLyho1J24ItRCBTr/m1YnZkzJy1hUiHUJrc/VlsDQzsCnInEGSg3bci0Lmd4w==}
engines: {node: '>= 10.0.0'}
dependencies:
'@babel/parser': 7.16.7
'@babel/types': 7.16.7
assert-never: 1.2.1
babel-walk: 3.0.0-canary-5
dev: true
/word-wrap/1.2.3:
resolution: {integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==}
engines: {node: '>=0.10.0'}
dev: true
/wrappy/1.0.2:
resolution: {integrity: sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=}
dev: true
/yallist/4.0.0:
resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==}
dev: true
/yaml/1.10.2:
resolution: {integrity: sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==}
engines: {node: '>= 6'}
dev: true
/zrender/5.2.1:
resolution: {integrity: sha512-M3bPGZuyLTNBC6LiNKXJwSCtglMp8XUEqEBG+2MdICDI3d1s500Y4P0CzldQGsqpRVB7fkvf3BKQQRxsEaTlsw==}
dependencies:
tslib: 2.3.0
dev: false
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,785 | [Feature][UI Next] Resource. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
- [x] File Manage
- [x] Resource Manage
- [ ] Function Manage
- [x] Task Group Option
- [x] Task Group Queue
### Use case
_No response_
### Related issues
[#7332](https://github.com/apache/dolphinscheduler/issues/7332)
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7785 | https://github.com/apache/dolphinscheduler/pull/8083 | 3456904af8601556cecff5a07b260d092ca13bcb | 8c188e809b2da397948ce6b576630b6a19f23b64 | "2022-01-04T07:22:20Z" | java | "2022-01-17T07:04:55Z" | dolphinscheduler-ui-next/src/service/modules/task-group/types.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
interface ListReq {
pageNo: number
pageSize: number
searchVal?: string
}
interface TaskGroupIdReq {
id: number
}
interface TaskGroupReq {
name: string
projectCode: number
groupSize: number
description: string
}
interface TaskGroupUpdateReq extends TaskGroupReq, TaskGroupIdReq {}
interface TaskGroup {
id: number
name: string
projectCode: number
projectName: string
groupSize: number
useSize: number
status: number
description: string
createTime: string
updateTime: string
}
interface TaskGroupRes {
totalList: TaskGroup[]
total: number
totalPage: number
pageSize: number
currentPage: number
start: number
}
export {
ListReq,
TaskGroupIdReq,
TaskGroupReq,
TaskGroupUpdateReq,
TaskGroup,
TaskGroupRes
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,785 | [Feature][UI Next] Resource. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
- [x] File Manage
- [x] Resource Manage
- [ ] Function Manage
- [x] Task Group Option
- [x] Task Group Queue
### Use case
_No response_
### Related issues
[#7332](https://github.com/apache/dolphinscheduler/issues/7332)
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7785 | https://github.com/apache/dolphinscheduler/pull/8083 | 3456904af8601556cecff5a07b260d092ca13bcb | 8c188e809b2da397948ce6b576630b6a19f23b64 | "2022-01-04T07:22:20Z" | java | "2022-01-17T07:04:55Z" | dolphinscheduler-ui-next/src/views/resource/taskGroupOption/components/form-modal.tsx | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { defineComponent, PropType, toRefs, onMounted, ref, toRaw } from 'vue'
import { NForm, NFormItem, NInput, NSelect } from 'naive-ui'
import { useForm } from '../use-form'
import Modal from '@/components/modal'
import { createTaskGroup, updateTaskGroup } from '@/service/modules/task-group'
import { queryAllProjectList } from '@/service/modules/projects'
const props = {
show: {
type: Boolean as PropType<boolean>,
default: false
},
data: {
type: Object as PropType<any>
},
status: {
type: Number as PropType<number>,
default: 0
}
}
const FormModal = defineComponent({
name: 'FormModal',
props,
emits: ['confirm', 'cancel'],
setup(props, { emit }) {
const { state, t } = useForm()
const projectOptions = ref<Array<{ label: string; value: number }>>([])
onMounted(() => {
queryAllProjectList().then((res: any) => {
res.forEach((item: any) => {
projectOptions.value.push({ label: item.name, value: item.code })
})
})
if (props.status === 1) {
state.formData.id = props.data.id
state.formData.name = props.data.name
state.formData.projectCode = props.data.projectCode
state.formData.groupSize = props.data.groupSize
state.formData.status = props.data.status
state.formData.description = props.data.description
} else {
state.formData.groupSize = 10
}
})
const onConfirm = () => {
;(props.status === 1
? updateTaskGroup(state.formData)
: createTaskGroup(state.formData)
).then(() => {
emit('confirm')
})
}
const onCancel = () => {
state.formData.projectCode = 0
state.formData.description = ''
emit('cancel')
}
return { ...toRefs(state), t, onConfirm, onCancel, projectOptions }
},
render() {
const { t, onConfirm, onCancel, show, status, projectOptions } = this
return (
<Modal
title={
status === 0
? t('resource.task_group_option.create')
: t('resource.task_group_option.edit')
}
show={show}
onConfirm={onConfirm}
onCancel={onCancel}
confirmDisabled={
!this.formData.name ||
!this.formData.groupSize ||
!this.formData.description
}
>
<NForm rules={this.rules} ref='formRef'>
<NFormItem label={t('resource.task_group_option.name')} path='name'>
<NInput
v-model={[this.formData.name, 'value']}
placeholder={t('resource.task_group_option.please_enter_name')}
/>
</NFormItem>
<NFormItem
label={t('resource.task_group_option.project_name')}
path='projectCode'
>
<NSelect
options={projectOptions}
v-model={[this.formData.projectCode, 'value']}
placeholder={t(
'resource.task_group_option.please_select_project'
)}
/>
</NFormItem>
<NFormItem
label={t('resource.task_group_option.resource_pool_size')}
path='groupSize'
>
<NInput
v-model={[this.formData.groupSize, 'value']}
placeholder={t(
'resource.task_group_option.please_enter_resource_pool_size'
)}
/>
</NFormItem>
<NFormItem
label={t('resource.task_group_option.desc')}
path='description'
>
<NInput
v-model={[this.formData.description, 'value']}
type='textarea'
placeholder={t('resource.task_group_option.please_enter_desc')}
/>
</NFormItem>
</NForm>
</Modal>
)
}
})
export default FormModal
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,785 | [Feature][UI Next] Resource. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
- [x] File Manage
- [x] Resource Manage
- [ ] Function Manage
- [x] Task Group Option
- [x] Task Group Queue
### Use case
_No response_
### Related issues
[#7332](https://github.com/apache/dolphinscheduler/issues/7332)
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7785 | https://github.com/apache/dolphinscheduler/pull/8083 | 3456904af8601556cecff5a07b260d092ca13bcb | 8c188e809b2da397948ce6b576630b6a19f23b64 | "2022-01-04T07:22:20Z" | java | "2022-01-17T07:04:55Z" | dolphinscheduler-ui-next/src/views/resource/taskGroupOption/use-form.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { useI18n } from 'vue-i18n'
import { reactive, ref } from 'vue'
import type { FormRules } from 'naive-ui'
export function useForm() {
const { t } = useI18n()
const state = reactive({
formRef: ref(),
formData: {
id: 0,
name: '',
projectCode: 0,
groupSize: 0,
status: 1,
description: ''
},
rules: {
name: {
required: true,
trigger: ['input', 'blur'],
validator() {
if (state.formData.name === '') {
return new Error(t('resource.task_group_option.please_enter_name'))
}
}
},
description: {
required: true,
trigger: ['input', 'blur'],
validator() {
if (state.formData.description === '') {
return new Error(t('resource.task_group_option.please_enter_desc'))
}
}
}
} as FormRules
})
return { state, t }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,785 | [Feature][UI Next] Resource. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
- [x] File Manage
- [x] Resource Manage
- [ ] Function Manage
- [x] Task Group Option
- [x] Task Group Queue
### Use case
_No response_
### Related issues
[#7332](https://github.com/apache/dolphinscheduler/issues/7332)
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7785 | https://github.com/apache/dolphinscheduler/pull/8083 | 3456904af8601556cecff5a07b260d092ca13bcb | 8c188e809b2da397948ce6b576630b6a19f23b64 | "2022-01-04T07:22:20Z" | java | "2022-01-17T07:04:55Z" | dolphinscheduler-ui-next/src/views/resource/taskGroupOption/use-table.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { h, reactive, ref } from 'vue'
import { useI18n } from 'vue-i18n'
import { useAsyncState } from '@vueuse/core'
import { format } from 'date-fns'
import type { TableColumns } from 'naive-ui/es/data-table/src/interface'
import { queryTaskGroupListPaging } from '@/service/modules/task-group'
import { queryAllProjectList } from '@/service/modules/projects'
import TableAction from './components/table-action'
import _ from 'lodash'
export function useTable(
updateItem = (
id: number,
name: string,
projectCode: number,
groupSize: number,
description: string,
status: number
): void => {},
resetTableData = () => {}
) {
const { t } = useI18n()
const columns: TableColumns<any> = [
{ title: t('resource.task_group_option.id'), key: 'index' },
{ title: t('resource.task_group_option.name'), key: 'name' },
{ title: t('resource.task_group_option.project_name'), key: 'projectName' },
{
title: t('resource.task_group_option.resource_pool_size'),
key: 'groupSize'
},
{
title: t('resource.task_group_option.resource_used_pool_size'),
key: 'useSize'
},
{ title: t('resource.task_group_option.desc'), key: 'description' },
{ title: t('resource.task_group_option.create_time'), key: 'createTime' },
{ title: t('resource.task_group_option.update_time'), key: 'updateTime' },
{
title: t('resource.task_group_option.actions'),
key: 'actions',
width: 150,
render: (row: any) =>
h(TableAction, {
row,
onResetTableData: () => {
if (variables.page > 1 && variables.tableData.length === 1) {
variables.page -= 1
}
resetTableData()
},
onUpdateItem: (
id: number,
name: string,
projectCode: number,
groupSize: number,
description: string,
status: number
) => {
updateItem(id, name, projectCode, groupSize, description, status)
}
})
}
]
const variables = reactive({
tableData: [],
page: ref(1),
pageSize: ref(10),
name: ref(null),
totalPage: ref(1)
})
const getTableData = (params: any) => {
const { state } = useAsyncState(
Promise.all([
queryTaskGroupListPaging(params),
queryAllProjectList()
]).then((values) => {
variables.totalPage = values[0].totalPage
variables.tableData = values[0].totalList.map((item, index) => {
item.projectName = _.find(values[1], { code: item.projectCode })?.name
item.createTime = format(
new Date(item.createTime),
'yyyy-MM-dd HH:mm:ss'
)
item.updateTime = format(
new Date(item.updateTime),
'yyyy-MM-dd HH:mm:ss'
)
return {
index: index + 1,
...item
}
}) as any
}),
{}
)
return state
}
return { getTableData, variables, columns }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,785 | [Feature][UI Next] Resource. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
- [x] File Manage
- [x] Resource Manage
- [ ] Function Manage
- [x] Task Group Option
- [x] Task Group Queue
### Use case
_No response_
### Related issues
[#7332](https://github.com/apache/dolphinscheduler/issues/7332)
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7785 | https://github.com/apache/dolphinscheduler/pull/8083 | 3456904af8601556cecff5a07b260d092ca13bcb | 8c188e809b2da397948ce6b576630b6a19f23b64 | "2022-01-04T07:22:20Z" | java | "2022-01-17T07:04:55Z" | dolphinscheduler-ui-next/src/views/resource/udf/resource/index.module.scss | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
.content {
width: 100%;
.card {
margin-bottom: 8px;
}
.header {
display: flex;
justify-content: space-between;
align-items: center;
margin: 10px 0;
.right {
> .search {
.list {
float: right;
margin: 3px 0 3px 4px;
}
}
}
}
.table {
table {
width: 100%;
tr {
height: 40px;
font-size: 12px;
th, td {
&:nth-child(1) {
width: 50px;
text-align: center;
}
}
th {
&:nth-child(1) {
width: 60px;
text-align: center;
}
>span {
font-size: 12px;
color: #555;
}
}
}
}
}
.pagination {
display: flex;
justify-content: center;
align-items: center;
margin-top: 20px;
}
.links {
color: #2080f0;
text-decoration: none;
&:hover {
text-decoration: underline;
}
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,812 | [Feature][UI Next] Monitor. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
- [x] Master
- [x] Worker
- [x] DB
- [x] Statistics
- [x] Audit Log
### Use case
_No response_
### Related issues
[#7332](https://github.com/apache/dolphinscheduler/issues/7332)
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7812 | https://github.com/apache/dolphinscheduler/pull/8088 | 2a773802ef7134c4faa93edd8eaacabbea7aabe2 | e7cd16706ab3284d15178ded2de154677fc356d3 | "2022-01-05T07:16:14Z" | java | "2022-01-17T11:06:11Z" | dolphinscheduler-ui-next/src/router/modules/monitor.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import type { Component } from 'vue'
import utils from '@/utils'
// All TSX files under the views folder automatically generate mapping relationship
const modules = import.meta.glob('/src/views/**/**.tsx')
const components: { [key: string]: Component } = utils.mapping(modules)
export default {
path: '/monitor',
name: 'monitor',
meta: { title: 'monitor' },
redirect: { name: 'servers-master' },
component: () => import('@/layouts/content'),
children: [
{
path: '/monitor/servers/master',
name: 'servers-master',
component: components['master'],
meta: {
title: 'Service Management - Master'
}
},
{
path: '/monitor/servers/worker',
name: 'servers-worker',
component: components['worker'],
meta: {
title: 'Service Management - Worker'
}
},
{
path: '/monitor/servers/db',
name: 'servers-db',
component: components['db'],
meta: {
title: 'Service Management - DB'
}
},
{
path: '/monitor/statistics/statistics',
name: 'statistics-statistics',
component: components['statistics'],
meta: {
title: 'Statistics Management - Statistics'
}
}
]
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,993 | [Feature][CI] Optimize Resource Center E2E test | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
At present, the Resource Center E2E test introduces an HDFS container, which is a small stand-alone Hadoop container. Constrained by the 2-core / 7 GB configuration of GitHub Actions runners, the HDFS container's heavy resource usage can in some cases overload the server and cause the Resource Center E2E test to fail.
Based on the above problems, I propose two optimizations:
1. Use an external HDFS container
2. Use S3 storage; MinIO might be a better and lighter choice
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7993 | https://github.com/apache/dolphinscheduler/pull/8066 | e7cd16706ab3284d15178ded2de154677fc356d3 | b705a08794f34acdd0be32e6e535a1e6973267ad | "2022-01-13T08:04:40Z" | java | "2022-01-17T11:32:47Z" | dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/file-manage/common.properties | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# user data local directory path, please make sure the directory exists and has read/write permissions
data.basedir.path=/tmp/dolphinscheduler
# resource storage type: HDFS, S3, NONE
resource.storage.type=HDFS
# resource storage path on HDFS/S3; resource files will be stored under this path. Configure it yourself and make sure the directory exists on HDFS with read/write permissions. "/dolphinscheduler" is recommended
resource.upload.path=/dolphinscheduler
# whether to startup kerberos
hadoop.security.authentication.startup.state=false
# java.security.krb5.conf path
java.security.krb5.conf.path=/opt/krb5.conf
# login user from keytab username
login.user.keytab.username=hdfs-mycluster@ESZ.COM
# login user from keytab path
login.user.keytab.path=/opt/hdfs.headless.keytab
# kerberos expire time, the unit is hour
kerberos.expire.time=2
# resource view suffixes
#resource.view.suffixs=txt,log,sh,bat,conf,cfg,py,java,sql,xml,hql,properties,json,yml,yaml,ini,js
# if resource.storage.type=HDFS, the user must have the permission to create directories under the HDFS root path
hdfs.root.user=hdfs
# if resource.storage.type=S3, the value like: s3a://dolphinscheduler; if resource.storage.type=HDFS and namenode HA is enabled, you need to copy core-site.xml and hdfs-site.xml to conf dir
fs.defaultFS=hdfs://hdfs:8020
# if resource.storage.type=S3, s3 endpoint
fs.s3a.endpoint=http://192.168.xx.xx:9010
# if resource.storage.type=S3, s3 access key
fs.s3a.access.key=A3DXS30FO22544RE
# if resource.storage.type=S3, s3 secret key
fs.s3a.secret.key=OloCLq3n+8+sdPHUhJ21XrSxTC+JK
# resourcemanager port, the default value is 8088 if not specified
resource.manager.httpaddress.port=8088
# if resourcemanager HA is enabled, please set the HA IPs; if resourcemanager is single, keep this value empty
yarn.resourcemanager.ha.rm.ids=192.168.xx.xx,192.168.xx.xx
# if resourcemanager HA is enabled or resourcemanager is not used, keep the default value; if resourcemanager is standalone, you only need to replace ds1 with the actual resourcemanager hostname
yarn.application.status.address=http://ds1:%s/ws/v1/cluster/apps/%s
# job history status url when application number threshold is reached(default 10000, maybe it was set to 1000)
yarn.job.history.status.address=http://ds1:19888/ws/v1/history/mapreduce/jobs/%s
# datasource encryption enable
datasource.encryption.enable=false
# datasource encryption salt
datasource.encryption.salt=!@#$%^&*
# use sudo or not, if set true, executing user is tenant user and deploy user needs sudo permissions; if set false, executing user is the deploy user and doesn't need sudo permissions
sudo.enable=true
# network interface preferred like eth0, default: empty
#dolphin.scheduler.network.interface.preferred=
# network IP gets priority, default: inner outer
#dolphin.scheduler.network.priority.strategy=default
# system env path
#dolphinscheduler.env.path=env/dolphinscheduler_env.sh
# development state
development.state=false
# rpc port
alert.rpc.port=50052
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,993 | [Feature][CI] Optimize Resource Center E2E test | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
At present, the Resource Center E2E test introduces an HDFS container, which is a small stand-alone Hadoop container. Constrained by the 2-core / 7 GB configuration of GitHub Actions runners, the HDFS container's heavy resource usage can in some cases overload the server and cause the Resource Center E2E test to fail.
Based on the above problems, I propose two optimizations:
1. Use an external HDFS container
2. Use S3 storage; MinIO might be a better and lighter choice
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7993 | https://github.com/apache/dolphinscheduler/pull/8066 | e7cd16706ab3284d15178ded2de154677fc356d3 | b705a08794f34acdd0be32e6e535a1e6973267ad | "2022-01-13T08:04:40Z" | java | "2022-01-17T11:32:47Z" | dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/file-manage/docker-compose.yaml | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
version: "2.1"
services:
dolphinscheduler:
image: apache/dolphinscheduler-standalone-server:ci
environment:
MASTER_MAX_CPU_LOAD_AVG: 100
WORKER_TENANT_AUTO_CREATE: 'true'
expose:
- 12345
networks:
- e2e
healthcheck:
test: [ "CMD", "curl", "http://localhost:12345/actuator/health" ]
interval: 5s
timeout: 60s
retries: 120
volumes:
- ./common.properties:/opt/dolphinscheduler/conf/common.properties
depends_on:
hdfs:
condition: service_healthy
hdfs:
image: mdouchement/hdfs:latest
hostname: hdfs
tty: true
stdin_open: true
expose:
- 8020
networks:
- e2e
healthcheck:
test: [ "CMD", "curl", "http://localhost:50070" ]
interval: 5s
timeout: 120s
retries: 120
networks:
e2e:
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,053 | [Bug] [dolphinscheduler-service] CronUtils.getMaxCycle return null | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
每年一月每天早上七点
I want to run the task every January at 7am
when I pass this cron , it should return YEAR , not null
CronUtils.getMaxCycle("0 0 7 * 1 ? *")
I want run this task ,every Jan
### What you expected to happen
System.out.println(CronUtils.getMaxCycle("0 0 7 * 1 ? *"));
YEAR
### How to reproduce
System.out.println(CronUtils.getMaxCycle("0 0 7 * 1 ? *"));
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
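An editor-added, minimal reproduction sketch of the report above (not part of the original issue). It assumes the dev-branch `CronUtils.getMaxCycle(String)` API shown later in this document, and that `CycleEnum` defines a `YEAR` constant, as the expected output implies:

```java
import org.apache.dolphinscheduler.common.enums.CycleEnum;
import org.apache.dolphinscheduler.service.quartz.cron.CronUtils;

public class YearlyCronRepro {
    public static void main(String[] args) {
        // "0 0 7 * 1 ? *" fires at 07:00 every day, but only in January,
        // so its widest scheduling cycle is yearly
        CycleEnum cycle = CronUtils.getMaxCycle("0 0 7 * 1 ? *");
        System.out.println(cycle); // prints null on the affected branch; YEAR is expected
    }
}
```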
| https://github.com/apache/dolphinscheduler/issues/8053 | https://github.com/apache/dolphinscheduler/pull/8086 | b705a08794f34acdd0be32e6e535a1e6973267ad | e0a99dd9093dacd161512535b0046699f3cf26af | "2022-01-14T13:21:02Z" | java | "2022-01-18T02:44:30Z" | dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/AbstractCycle.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.service.quartz.cron;
import org.apache.dolphinscheduler.common.enums.CycleEnum;
import com.cronutils.model.Cron;
import com.cronutils.model.field.CronField;
import com.cronutils.model.field.CronFieldName;
import com.cronutils.model.field.expression.Always;
import com.cronutils.model.field.expression.And;
import com.cronutils.model.field.expression.Between;
import com.cronutils.model.field.expression.Every;
import com.cronutils.model.field.expression.FieldExpression;
import com.cronutils.model.field.expression.On;
/**
* Cycle
*/
public abstract class AbstractCycle {
protected Cron cron;
protected CronField minField;
protected CronField hourField;
protected CronField dayOfMonthField;
protected CronField dayOfWeekField;
protected CronField monthField;
protected CronField yearField;
/**
* chain this cycle with another cycle
*
* @param cycle cycle
* @return CycleLinks
*/
public CycleLinks addCycle(AbstractCycle cycle) {
return new CycleLinks(this.cron).addCycle(this).addCycle(cycle);
}
/**
* cycle constructor
*
* @param cron cron
*/
protected AbstractCycle(Cron cron) {
if (cron == null) {
throw new IllegalArgumentException("cron must not be null!");
}
this.cron = cron;
this.minField = cron.retrieve(CronFieldName.MINUTE);
this.hourField = cron.retrieve(CronFieldName.HOUR);
this.dayOfMonthField = cron.retrieve(CronFieldName.DAY_OF_MONTH);
this.dayOfWeekField = cron.retrieve(CronFieldName.DAY_OF_WEEK);
this.monthField = cron.retrieve(CronFieldName.MONTH);
this.yearField = cron.retrieve(CronFieldName.YEAR);
}
/**
* whether the minute field has a value
*
* @return if minute field has a value return true,else return false
*/
protected boolean minFiledIsSetAll() {
FieldExpression minFieldExpression = minField.getExpression();
return (minFieldExpression instanceof Every || minFieldExpression instanceof Always
|| minFieldExpression instanceof Between || minFieldExpression instanceof And
|| minFieldExpression instanceof On);
}
/**
* whether the minute field has a value of every or always
*
* @return if minute field has a value of every or always return true,else return false
*/
protected boolean minFiledIsEvery() {
FieldExpression minFieldExpression = minField.getExpression();
return (minFieldExpression instanceof Every || minFieldExpression instanceof Always);
}
/**
* whether the hour field has a value
*
* @return if hour field has a value return true,else return false
*/
protected boolean hourFiledIsSetAll() {
FieldExpression hourFieldExpression = hourField.getExpression();
return (hourFieldExpression instanceof Every || hourFieldExpression instanceof Always
|| hourFieldExpression instanceof Between || hourFieldExpression instanceof And
|| hourFieldExpression instanceof On);
}
/**
* whether the hour field has a value of every or always
*
* @return if hour field has a value of every or always return true,else return false
*/
protected boolean hourFiledIsEvery() {
FieldExpression hourFieldExpression = hourField.getExpression();
return (hourFieldExpression instanceof Every || hourFieldExpression instanceof Always);
}
/**
* whether the day Of month field has a value
*
* @return if day Of month field has a value return true,else return false
*/
protected boolean dayOfMonthFieldIsSetAll() {
return (dayOfMonthField.getExpression() instanceof Every || dayOfMonthField.getExpression() instanceof Always
|| dayOfMonthField.getExpression() instanceof Between || dayOfMonthField.getExpression() instanceof And
|| dayOfMonthField.getExpression() instanceof On);
}
/**
* whether the day Of Month field has a value of every or always
*
* @return if day Of Month field has a value of every or always return true,else return false
*/
protected boolean dayOfMonthFieldIsEvery() {
return (dayOfMonthField.getExpression() instanceof Every || dayOfMonthField.getExpression() instanceof Always);
}
/**
* whether month field has a value
*
* @return if month field has a value return true,else return false
*/
protected boolean monthFieldIsSetAll() {
FieldExpression monthFieldExpression = monthField.getExpression();
return (monthFieldExpression instanceof Every || monthFieldExpression instanceof Always
|| monthFieldExpression instanceof Between || monthFieldExpression instanceof And
|| monthFieldExpression instanceof On);
}
/**
* whether the month field has a value of every or always
*
* @return if month field has a value of every or always return true,else return false
*/
protected boolean monthFieldIsEvery() {
FieldExpression monthFieldExpression = monthField.getExpression();
return (monthFieldExpression instanceof Every || monthFieldExpression instanceof Always);
}
/**
* whether the day Of week field has a value
*
* @return if day Of week field has a value return true,else return false
*/
protected boolean dayofWeekFieldIsSetAll() {
FieldExpression dayOfWeekFieldExpression = dayOfWeekField.getExpression();
return (dayOfWeekFieldExpression instanceof Every || dayOfWeekFieldExpression instanceof Always
|| dayOfWeekFieldExpression instanceof Between || dayOfWeekFieldExpression instanceof And
|| dayOfWeekFieldExpression instanceof On);
}
/**
* whether the day Of week field has a value of every or always
*
* @return if day Of week field has a value of every or always return true,else return false
*/
protected boolean dayofWeekFieldIsEvery() {
FieldExpression dayOfWeekFieldExpression = dayOfWeekField.getExpression();
return (dayOfWeekFieldExpression instanceof Every || dayOfWeekFieldExpression instanceof Always);
}
/**
* get cycle enum
*
* @return CycleEnum
*/
protected abstract CycleEnum getCycle();
/**
* get mini level cycle enum
*
* @return CycleEnum
*/
protected abstract CycleEnum getMiniCycle();
}
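// Editor's sketch (not part of the original file): a small runnable demo of how
// the predicates above classify a parsed crontab field; the crontab is an
// illustrative assumption. "*/5" in the minute position parses to an Every
// expression, so minFiledIsEvery() would hold in any concrete cycle built on it.
class FieldPredicateDemo {
    public static void main(String[] args) {
        Cron cron = CronUtils.parse2Cron("0 */5 * * * ? *");
        CronField minute = cron.retrieve(CronFieldName.MINUTE);
        System.out.println(minute.getExpression() instanceof Every); // true
    }
}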
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,053 | [Bug] [dolphinscheduler-service] CronUtils.getMaxCycle return null | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
I want to run the task every day at 7 a.m. in January, every year.
When I pass this cron expression, it should return YEAR, not null:
CronUtils.getMaxCycle("0 0 7 * 1 ? *")
### What you expected to happen
System.out.println(CronUtils.getMaxCycle("0 0 7 * 1 ? *"));
YEAR
### How to reproduce
System.out.println(CronUtils.getMaxCycle("0 0 7 * 1 ? *"));
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8053 | https://github.com/apache/dolphinscheduler/pull/8086 | b705a08794f34acdd0be32e6e535a1e6973267ad | e0a99dd9093dacd161512535b0046699f3cf26af | "2022-01-14T13:21:02Z" | java | "2022-01-18T02:44:30Z" | dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/CronUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.service.quartz.cron;
import static org.apache.dolphinscheduler.service.quartz.cron.CycleFactory.day;
import static org.apache.dolphinscheduler.service.quartz.cron.CycleFactory.hour;
import static org.apache.dolphinscheduler.service.quartz.cron.CycleFactory.min;
import static org.apache.dolphinscheduler.service.quartz.cron.CycleFactory.month;
import static org.apache.dolphinscheduler.service.quartz.cron.CycleFactory.week;
import static com.cronutils.model.CronType.QUARTZ;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CycleEnum;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.commons.collections.CollectionUtils;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.cronutils.model.Cron;
import com.cronutils.model.definition.CronDefinitionBuilder;
import com.cronutils.parser.CronParser;
/**
* cron utils
*/
public class CronUtils {
private CronUtils() {
throw new IllegalStateException("CronUtils class");
}
private static final Logger logger = LoggerFactory.getLogger(CronUtils.class);
private static final CronParser QUARTZ_CRON_PARSER = new CronParser(CronDefinitionBuilder.instanceDefinitionFor(QUARTZ));
/**
* parse to cron
*
* @param cronExpression cron expression, never null
* @return Cron instance, corresponding to cron expression received
*/
public static Cron parse2Cron(String cronExpression) {
return QUARTZ_CRON_PARSER.parse(cronExpression);
}
/**
* build a new CronExpression based on the string cronExpression
*
* @param cronExpression String representation of the cron expression the new object should represent
* @return CronExpression
* @throws ParseException if the string expression cannot be parsed into a valid
*/
public static CronExpression parse2CronExpression(String cronExpression) throws ParseException {
return new CronExpression(cronExpression);
}
/**
* get max cycle
*
* @param cron cron
* @return CycleEnum
*/
public static CycleEnum getMaxCycle(Cron cron) {
return min(cron).addCycle(hour(cron)).addCycle(day(cron)).addCycle(week(cron)).addCycle(month(cron)).getCycle();
}
/**
* get min cycle
*
* @param cron cron
* @return CycleEnum
*/
public static CycleEnum getMiniCycle(Cron cron) {
return min(cron).addCycle(hour(cron)).addCycle(day(cron)).addCycle(week(cron)).addCycle(month(cron)).getMiniCycle();
}
/**
* get max cycle
*
* @param crontab crontab
* @return CycleEnum
*/
public static CycleEnum getMaxCycle(String crontab) {
return getMaxCycle(parse2Cron(crontab));
}
/**
* gets all scheduled times for a period of time based on not self dependency
*
* @param startTime startTime
* @param endTime endTime
* @param cronExpression cronExpression
* @return date list
*/
public static List<Date> getFireDateList(Date startTime, Date endTime, CronExpression cronExpression) {
List<Date> dateList = new ArrayList<>();
while (Stopper.isRunning()) {
startTime = cronExpression.getNextValidTimeAfter(startTime);
if (startTime.after(endTime)) {
break;
}
dateList.add(startTime);
}
return dateList;
}
/**
* gets expect scheduled times for a period of time based on self dependency
*
* @param startTime startTime
* @param endTime endTime
* @param cronExpression cronExpression
* @param fireTimes fireTimes
* @return date list
*/
public static List<Date> getSelfFireDateList(Date startTime, Date endTime, CronExpression cronExpression, int fireTimes) {
List<Date> dateList = new ArrayList<>();
while (fireTimes > 0) {
startTime = cronExpression.getNextValidTimeAfter(startTime);
if (startTime.after(endTime) || startTime.equals(endTime)) {
break;
}
dateList.add(startTime);
fireTimes--;
}
return dateList;
}
/**
* gets all scheduled times for a period of time based on self dependency
*
* @param startTime startTime
* @param endTime endTime
* @param cronExpression cronExpression
* @return date list
*/
public static List<Date> getSelfFireDateList(Date startTime, Date endTime, CronExpression cronExpression) {
List<Date> dateList = new ArrayList<>();
while (Stopper.isRunning()) {
startTime = cronExpression.getNextValidTimeAfter(startTime);
if (startTime.after(endTime) || startTime.equals(endTime)) {
break;
}
dateList.add(startTime);
}
return dateList;
}
/**
* gets all scheduled times for a period of time based on self dependency
* if schedules is empty, a default daily schedule (Constants.DEFAULT_CRON_STRING) is used
*/
public static List<Date> getSelfFireDateList(final Date startTime, final Date endTime, final List<Schedule> schedules) {
List<Date> result = new ArrayList<>();
if (startTime.equals(endTime)) {
result.add(startTime);
return result;
}
// support left closed and right closed time interval (startDate <= N <= endDate)
Date from = new Date(startTime.getTime() - Constants.SECOND_TIME_MILLIS);
Date to = new Date(endTime.getTime() + Constants.SECOND_TIME_MILLIS);
List<Schedule> listSchedule = new ArrayList<>(schedules);
if (CollectionUtils.isEmpty(listSchedule)) {
Schedule schedule = new Schedule();
schedule.setCrontab(Constants.DEFAULT_CRON_STRING);
listSchedule.add(schedule);
}
for (Schedule schedule : listSchedule) {
result.addAll(CronUtils.getSelfFireDateList(from, to, schedule.getCrontab()));
}
return result;
}
/**
* gets all scheduled times for a period of time based on self dependency
*
* @param startTime startTime
* @param endTime endTime
* @param cron cron
* @return date list
*/
public static List<Date> getSelfFireDateList(Date startTime, Date endTime, String cron) {
CronExpression cronExpression = null;
try {
cronExpression = parse2CronExpression(cron);
} catch (ParseException e) {
logger.error(e.getMessage(), e);
return Collections.emptyList();
}
return getSelfFireDateList(startTime, endTime, cronExpression);
}
/**
* get expiration time
*
* @param startTime startTime
* @param cycleEnum cycleEnum
* @return date
*/
public static Date getExpirationTime(Date startTime, CycleEnum cycleEnum) {
Date maxExpirationTime = null;
Date startTimeMax = null;
try {
startTimeMax = getEndTime(startTime);
Calendar calendar = Calendar.getInstance();
calendar.setTime(startTime);
switch (cycleEnum) {
case HOUR:
calendar.add(Calendar.HOUR, 1);
break;
case DAY:
calendar.add(Calendar.DATE, 1);
break;
case WEEK:
calendar.add(Calendar.DATE, 1);
break;
case MONTH:
calendar.add(Calendar.DATE, 1);
break;
default:
logger.error("Dependent process definition's cycleEnum is {},not support!!", cycleEnum);
break;
}
maxExpirationTime = calendar.getTime();
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
return DateUtils.compare(startTimeMax, maxExpirationTime) ? maxExpirationTime : startTimeMax;
}
/**
* get the end time of the day by value of date
*
* @return date
*/
private static Date getEndTime(Date date) {
Calendar end = new GregorianCalendar();
end.setTime(date);
end.set(Calendar.HOUR_OF_DAY, 23);
end.set(Calendar.MINUTE, 59);
end.set(Calendar.SECOND, 59);
end.set(Calendar.MILLISECOND, 999);
return end.getTime();
}
}
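// Editor's sketch (not part of the original file): expands a crontab into the
// concrete fire times inside a window using the utilities above; the window
// bounds and the hourly crontab are illustrative assumptions.
class CronUtilsDemo {
    public static void main(String[] args) {
        Date start = DateUtils.stringToDate("2022-01-01 00:00:00");
        Date end = DateUtils.stringToDate("2022-01-02 00:00:00");
        // getSelfFireDateList excludes both endpoints, so an hourly crontab
        // yields the 23 triggers from 01:00 through 23:00
        List<Date> fires = CronUtils.getSelfFireDateList(start, end, "0 0 * * * ? *");
        System.out.println(fires.size()); // 23
    }
}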
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,053 | [Bug] [dolphinscheduler-service] CronUtils.getMaxCycle return null | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
I want to run the task every day at 7 a.m. in January, every year.
When I pass this cron expression, it should return YEAR, not null:
CronUtils.getMaxCycle("0 0 7 * 1 ? *")
### What you expected to happen
System.out.println(CronUtils.getMaxCycle("0 0 7 * 1 ? *"));
YEAR
### How to reproduce
System.out.println(CronUtils.getMaxCycle("0 0 7 * 1 ? *"));
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8053 | https://github.com/apache/dolphinscheduler/pull/8086 | b705a08794f34acdd0be32e6e535a1e6973267ad | e0a99dd9093dacd161512535b0046699f3cf26af | "2022-01-14T13:21:02Z" | java | "2022-01-18T02:44:30Z" | dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/CycleFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.service.quartz.cron;
import com.cronutils.model.Cron;
import com.cronutils.model.field.expression.Always;
import com.cronutils.model.field.expression.QuestionMark;
import org.apache.dolphinscheduler.common.enums.CycleEnum;
/**
* Crontab Cycle Tool Factory
*/
public class CycleFactory {
private CycleFactory() {
throw new IllegalStateException("CycleFactory class");
}
/**
* min
* @param cron cron
* @return AbstractCycle
*/
public static AbstractCycle min(Cron cron) {
return new MinCycle(cron);
}
/**
* hour
* @param cron cron
* @return AbstractCycle
*/
public static AbstractCycle hour(Cron cron) {
return new HourCycle(cron);
}
/**
* day
* @param cron cron
* @return AbstractCycle
*/
public static AbstractCycle day(Cron cron) {
return new DayCycle(cron);
}
/**
* week
* @param cron cron
* @return AbstractCycle
*/
public static AbstractCycle week(Cron cron) {
return new WeekCycle(cron);
}
/**
* month
* @param cron cron
* @return AbstractCycle
*/
public static AbstractCycle month(Cron cron) {
return new MonthCycle(cron);
}
/**
* day cycle
*/
public static class DayCycle extends AbstractCycle {
public DayCycle(Cron cron) {
super(cron);
}
/**
* get cycle
* @return CycleEnum
*/
@Override
protected CycleEnum getCycle() {
if (minFiledIsSetAll()
&& hourFiledIsSetAll()
&& dayOfMonthFieldIsEvery()
&& dayOfWeekField.getExpression() instanceof QuestionMark
&& monthField.getExpression() instanceof Always) {
return CycleEnum.DAY;
}
return null;
}
/**
* get min cycle
* @return CycleEnum
*/
@Override
protected CycleEnum getMiniCycle() {
if (dayOfMonthFieldIsEvery()) {
return CycleEnum.DAY;
}
return null;
}
}
/**
* hour cycle
*/
public static class HourCycle extends AbstractCycle {
public HourCycle(Cron cron) {
super(cron);
}
/**
* get cycle
* @return CycleEnum
*/
@Override
protected CycleEnum getCycle() {
if (minFiledIsSetAll()
&& hourFiledIsEvery()
&& dayOfMonthField.getExpression() instanceof Always
&& dayOfWeekField.getExpression() instanceof QuestionMark
&& monthField.getExpression() instanceof Always) {
return CycleEnum.HOUR;
}
return null;
}
/**
* get mini cycle
* @return CycleEnum
*/
@Override
protected CycleEnum getMiniCycle() {
if(hourFiledIsEvery()){
return CycleEnum.HOUR;
}
return null;
}
}
/**
* minute cycle
*/
public static class MinCycle extends AbstractCycle {
public MinCycle(Cron cron) {
super(cron);
}
/**
* get cycle
* @return CycleEnum
*/
@Override
protected CycleEnum getCycle() {
if (minFiledIsEvery()
&& hourField.getExpression() instanceof Always
&& dayOfMonthField.getExpression() instanceof Always
&& monthField.getExpression() instanceof Always) {
return CycleEnum.MINUTE;
}
return null;
}
/**
* get min cycle
* @return CycleEnum
*/
@Override
protected CycleEnum getMiniCycle() {
if(minFiledIsEvery()){
return CycleEnum.MINUTE;
}
return null;
}
}
/**
* month cycle
*/
public static class MonthCycle extends AbstractCycle {
public MonthCycle(Cron cron) {
super(cron);
}
/**
* get cycle
* @return CycleEnum
*/
@Override
protected CycleEnum getCycle() {
boolean flag = (minFiledIsSetAll()
&& hourFiledIsSetAll()
&& dayOfMonthFieldIsSetAll()
&& dayOfWeekField.getExpression() instanceof QuestionMark
&& monthFieldIsEvery()) ||
(minFiledIsSetAll()
&& hourFiledIsSetAll()
&& dayOfMonthField.getExpression() instanceof QuestionMark
&& dayofWeekFieldIsSetAll()
&& monthFieldIsEvery());
if (flag) {
return CycleEnum.MONTH;
}
return null;
}
/**
* get mini cycle
* @return CycleEnum
*/
@Override
protected CycleEnum getMiniCycle() {
if (monthFieldIsEvery()) {
return CycleEnum.MONTH;
}
return null;
}
}
/**
* week cycle
*/
public static class WeekCycle extends AbstractCycle {
public WeekCycle(Cron cron) {
super(cron);
}
/**
* get cycle
* @return CycleEnum
*/
@Override
protected CycleEnum getCycle() {
if (minFiledIsSetAll()
&& hourFiledIsSetAll()
&& dayOfMonthField.getExpression() instanceof QuestionMark
&& dayofWeekFieldIsEvery()
&& monthField.getExpression() instanceof Always) {
return CycleEnum.WEEK;
}
return null;
}
/**
* get mini cycle
* @return CycleEnum
*/
@Override
protected CycleEnum getMiniCycle() {
if (dayofWeekFieldIsEvery()) {
return CycleEnum.WEEK;
}
return null;
}
}
}
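// Editor's sketch (not part of the original file, and not necessarily the fix
// merged in the linked PR): one way the yearly case from issue #8053
// ("0 0 7 * 1 ? *") could be recognized. It assumes CycleEnum defines YEAR, as
// the issue's expected output implies; CronUtils.getMaxCycle would also need to
// chain .addCycle(new YearCycle(cron)) for this to take effect.
class YearCycle extends AbstractCycle {
    public YearCycle(Cron cron) {
        super(cron);
    }

    @Override
    protected CycleEnum getCycle() {
        // minute/hour/day-of-month set, day-of-week unset, and the month field
        // fixed to specific month(s) rather than every month: such a crontab
        // can only fire during the named month(s) of each year
        if (minFiledIsSetAll()
                && hourFiledIsSetAll()
                && dayOfMonthFieldIsSetAll()
                && dayOfWeekField.getExpression() instanceof QuestionMark
                && monthFieldIsSetAll()
                && !monthFieldIsEvery()) {
            return CycleEnum.YEAR;
        }
        return null;
    }

    @Override
    protected CycleEnum getMiniCycle() {
        // the finest repeating unit is already covered by the finer cycles;
        // a yearly mini-cycle is deliberately left out of this sketch
        return null;
    }
}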
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,053 | [Bug] [dolphinscheduler-service] CronUtils.getMaxCycle return null | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
I want to run the task every day at 7 a.m. in January, every year.
When I pass this cron expression, it should return YEAR, not null:
CronUtils.getMaxCycle("0 0 7 * 1 ? *")
### What you expected to happen
System.out.println(CronUtils.getMaxCycle("0 0 7 * 1 ? *"));
YEAR
### How to reproduce
System.out.println(CronUtils.getMaxCycle("0 0 7 * 1 ? *"));
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8053 | https://github.com/apache/dolphinscheduler/pull/8086 | b705a08794f34acdd0be32e6e535a1e6973267ad | e0a99dd9093dacd161512535b0046699f3cf26af | "2022-01-14T13:21:02Z" | java | "2022-01-18T02:44:30Z" | dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/quartz/cron/CronUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.service.quartz.cron;
import com.cronutils.builder.CronBuilder;
import com.cronutils.model.Cron;
import com.cronutils.model.CronType;
import com.cronutils.model.definition.CronDefinitionBuilder;
import com.cronutils.model.field.CronField;
import com.cronutils.model.field.CronFieldName;
import com.cronutils.model.field.expression.*;
import org.apache.dolphinscheduler.common.enums.CycleEnum;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.text.ParseException;
import java.util.Date;
import static com.cronutils.model.field.expression.FieldExpressionFactory.*;
/**
* CronUtilsTest
*/
public class CronUtilsTest {
private static final Logger logger = LoggerFactory.getLogger(CronUtilsTest.class);
/**
* cron as string test
*/
@Test
public void testCronAsString() {
Cron cron = CronBuilder.cron(CronDefinitionBuilder.instanceDefinitionFor(CronType.QUARTZ))
.withYear(always())
.withDoW(questionMark())
.withMonth(always())
.withDoM(always())
.withHour(always())
.withMinute(every(5))
.withSecond(on(0))
.instance();
// Obtain the string expression
String cronAsString = cron.asString();
        // 0 */5 * * * ? *  every five minutes (once every 5 minutes)
Assert.assertEquals("0 */5 * * * ? *", cronAsString);
}
/**
* cron parse test
* @throws ParseException if error throws ParseException
*/
@Test
public void testCronParse() throws ParseException {
String strCrontab = "0 1 2 3 * ? *";
Cron depCron = CronUtils.parse2Cron(strCrontab);
Assert.assertEquals("0", depCron.retrieve(CronFieldName.SECOND).getExpression().asString());
Assert.assertEquals("1", depCron.retrieve(CronFieldName.MINUTE).getExpression().asString());
Assert.assertEquals("2", depCron.retrieve(CronFieldName.HOUR).getExpression().asString());
Assert.assertEquals("3", depCron.retrieve(CronFieldName.DAY_OF_MONTH).getExpression().asString());
Assert.assertEquals("*", depCron.retrieve(CronFieldName.MONTH).getExpression().asString());
Assert.assertEquals("*", depCron.retrieve(CronFieldName.YEAR).getExpression().asString());
}
/**
* schedule type test
* @throws ParseException if error throws ParseException
*/
@Test
public void testScheduleType() throws ParseException {
CycleEnum cycleEnum = CronUtils.getMaxCycle(CronUtils.parse2Cron("0 */1 * * * ? *"));
Assert.assertEquals("MINUTE", cycleEnum.name());
CycleEnum cycleEnum2 = CronUtils.getMaxCycle("0 * * * * ? *");
Assert.assertEquals("MINUTE", cycleEnum2.name());
CycleEnum cycleEnum3 = CronUtils.getMiniCycle(CronUtils.parse2Cron("0 * * * * ? *"));
Assert.assertEquals("MINUTE", cycleEnum3.name());
}
/**
* test
*/
@Test
public void test2(){
Cron cron1 = CronBuilder.cron(CronDefinitionBuilder.instanceDefinitionFor(CronType.QUARTZ))
.withYear(always())
.withDoW(questionMark())
.withMonth(always())
.withDoM(always())
.withHour(always())
.withMinute(every(5))
.withSecond(on(0))
.instance();
// minute cycle
        String[] cronArray = new String[]{"* * * * * ? *", "* 0 * * * ? *",
                "* 5 * * 3/5 ? *", "0 0 * * * ? *"};
        for (String minCrontab : cronArray) {
            if (!org.quartz.CronExpression.isValidExpression(minCrontab)) {
                throw new RuntimeException(minCrontab + " verification failed: cron expression is not valid");
}
Cron cron = CronUtils.parse2Cron(minCrontab);
CronField minField = cron.retrieve(CronFieldName.MINUTE);
logger.info("minField instanceof Between:"+(minField.getExpression() instanceof Between));
logger.info("minField instanceof Every:"+(minField.getExpression() instanceof Every));
logger.info("minField instanceof Always:" + (minField.getExpression() instanceof Always));
logger.info("minField instanceof On:"+(minField.getExpression() instanceof On));
logger.info("minField instanceof And:"+(minField.getExpression() instanceof And));
CronField hourField = cron.retrieve(CronFieldName.HOUR);
logger.info("hourField instanceof Between:"+(hourField.getExpression() instanceof Between));
logger.info("hourField instanceof Always:"+(hourField.getExpression() instanceof Always));
logger.info("hourField instanceof Every:"+(hourField.getExpression() instanceof Every));
logger.info("hourField instanceof On:"+(hourField.getExpression() instanceof On));
logger.info("hourField instanceof And:"+(hourField.getExpression() instanceof And));
CronField dayOfMonthField = cron.retrieve(CronFieldName.DAY_OF_MONTH);
logger.info("dayOfMonthField instanceof Between:"+(dayOfMonthField.getExpression() instanceof Between));
logger.info("dayOfMonthField instanceof Always:"+(dayOfMonthField.getExpression() instanceof Always));
logger.info("dayOfMonthField instanceof Every:"+(dayOfMonthField.getExpression() instanceof Every));
logger.info("dayOfMonthField instanceof On:"+(dayOfMonthField.getExpression() instanceof On));
logger.info("dayOfMonthField instanceof And:"+(dayOfMonthField.getExpression() instanceof And));
logger.info("dayOfMonthField instanceof QuestionMark:"+(dayOfMonthField.getExpression() instanceof QuestionMark));
CronField monthField = cron.retrieve(CronFieldName.MONTH);
logger.info("monthField instanceof Between:"+(monthField.getExpression() instanceof Between));
logger.info("monthField instanceof Always:"+(monthField.getExpression() instanceof Always));
logger.info("monthField instanceof Every:"+(monthField.getExpression() instanceof Every));
logger.info("monthField instanceof On:"+(monthField.getExpression() instanceof On));
logger.info("monthField instanceof And:"+(monthField.getExpression() instanceof And));
logger.info("monthField instanceof QuestionMark:"+(monthField.getExpression() instanceof QuestionMark));
CronField dayOfWeekField = cron.retrieve(CronFieldName.DAY_OF_WEEK);
logger.info("dayOfWeekField instanceof Between:"+(dayOfWeekField.getExpression() instanceof Between));
logger.info("dayOfWeekField instanceof Always:"+(dayOfWeekField.getExpression() instanceof Always));
logger.info("dayOfWeekField instanceof Every:"+(dayOfWeekField.getExpression() instanceof Every));
logger.info("dayOfWeekField instanceof On:"+(dayOfWeekField.getExpression() instanceof On));
logger.info("dayOfWeekField instanceof And:"+(dayOfWeekField.getExpression() instanceof And));
logger.info("dayOfWeekField instanceof QuestionMark:"+(dayOfWeekField.getExpression() instanceof QuestionMark));
CycleEnum cycleEnum = CronUtils.getMaxCycle(minCrontab);
if(cycleEnum !=null){
logger.info(cycleEnum.name());
}else{
logger.info("can't get scheduleType");
}
}
Assert.assertTrue(true);
}
@Test
public void getSelfFireDateList() throws ParseException {
Date from = DateUtils.stringToDate("2020-01-01 00:00:00");
Date to = DateUtils.stringToDate("2020-01-31 00:00:00");
// test date
Assert.assertEquals(0, CronUtils.getSelfFireDateList(to, from, "0 0 0 * * ? ").size());
// test error cron
Assert.assertEquals(0, CronUtils.getSelfFireDateList(from, to, "0 0 0 * *").size());
// test cron
Assert.assertEquals(29, CronUtils.getSelfFireDateList(from, to, "0 0 0 * * ? ").size());
// test other
Assert.assertEquals(30, CronUtils.getFireDateList(from, to, CronUtils.parse2CronExpression("0 0 0 * * ? ")).size());
Assert.assertEquals(5, CronUtils.getSelfFireDateList(from, to, CronUtils.parse2CronExpression("0 0 0 * * ? "), 5).size());
from = DateUtils.stringToDate("2020-01-01 00:02:00");
to = DateUtils.stringToDate("2020-01-01 00:02:00");
Assert.assertEquals(1, CronUtils.getFireDateList(new Date(from.getTime() - 1000), to, CronUtils.parse2CronExpression("0 * * * * ? ")).size());
from = DateUtils.stringToDate("2020-01-01 00:02:00");
to = DateUtils.stringToDate("2020-01-01 00:04:00");
Assert.assertEquals(2, CronUtils.getFireDateList(new Date(from.getTime() - 1000),
new Date(to.getTime() - 1000),
CronUtils.parse2CronExpression("0 * * * * ? ")).size());
}
@Test
public void getExpirationTime(){
Date startTime = DateUtils.stringToDate("2020-02-07 18:30:00");
Date expirationTime = CronUtils.getExpirationTime(startTime, CycleEnum.HOUR);
Assert.assertEquals("2020-02-07 19:30:00", DateUtils.dateToString(expirationTime));
expirationTime = CronUtils.getExpirationTime(startTime, CycleEnum.DAY);
Assert.assertEquals("2020-02-07 23:59:59", DateUtils.dateToString(expirationTime));
expirationTime = CronUtils.getExpirationTime(startTime, CycleEnum.WEEK);
Assert.assertEquals("2020-02-07 23:59:59", DateUtils.dateToString(expirationTime));
expirationTime = CronUtils.getExpirationTime(startTime, CycleEnum.MONTH);
Assert.assertEquals("2020-02-07 23:59:59", DateUtils.dateToString(expirationTime));
expirationTime = CronUtils.getExpirationTime(startTime, CycleEnum.YEAR);
Assert.assertEquals("2020-02-07 18:30:00", DateUtils.dateToString(expirationTime));
}
} |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,092 | [Bug] [task-datax] cannot use Parameter for clickhouse datax task | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
When I used a parameter in a ClickHouse DataX task, it always failed.
### What you expected to happen
[WARN] 2022-01-17 20:19:50.042 - [taskAppId=TASK-540354961620992_3-13328-48308]:[480] - database driver [CLICKHOUSE] is not support grammatical analysis sql
[INFO] 2022-01-17 20:19:50.042 - [taskAppId=TASK-540354961620992_3-13328-48308]:[457] - try to execute sql analysis query column name
.......
ru.yandex.clickhouse.except.ClickHouseException: ClickHouse exception, code: 41, host: 10.17.16.46, port: 8123; Code: 41, e.displayText() = DB::ParsingException: Cannot parse datetime: Cannot parse UInt32 from String: While processing (paymentType != 'yb') AND (datatime >= (toUnixTimestamp(concat('${dtdh}', ':00:00')) * 1000)) AND (datatime < (toUnixTimestamp(concat('2022-01-17 19', ':59:59')) * 1000)) (version 21.6.6.51 (official build))
at ru.yandex.clickhouse.except.ClickHouseExceptionSpecifier.specify(ClickHouseExceptionSpecifier.java:58)
at ru.yandex.clickhouse.except.ClickHouseExceptionSpecifier.specify(ClickHouseExceptionSpecifier.java:28)
at ru.yandex.clickhouse.ClickHouseStatementImpl.checkForErrorAndThrow(ClickHouseStatementImpl.java:815)
at ru.yandex.clickhouse.ClickHouseStatementImpl.getInputStream(ClickHouseStatementImpl.java:611)
at ru.yandex.clickhouse.ClickHouseStatementImpl.executeQuery(ClickHouseStatementImpl.java:114)
at ru.yandex.clickhouse.ClickHouseStatementImpl.executeQuery(ClickHouseStatementImpl.java:97)
at ru.yandex.clickhouse.ClickHouseStatementImpl.executeQuery(ClickHouseStatementImpl.java:92)
at ru.yandex.clickhouse.ClickHouseStatementImpl.executeQuery(ClickHouseStatementImpl.java:87)
at ru.yandex.clickhouse.ClickHousePreparedStatementImpl.executeQuery(ClickHousePreparedStatementImpl.java:128)
at org.apache.dolphinscheduler.plugin.task.datax.DataxTask.tryExecuteSqlResolveColumnNames(DataxTask.java:552)
at org.apache.dolphinscheduler.plugin.task.datax.DataxTask.parsingSqlColumnNames(DataxTask.java:458)
at org.apache.dolphinscheduler.plugin.task.datax.DataxTask.buildDataxJobContentJson(DataxTask.java:279)
at org.apache.dolphinscheduler.plugin.task.datax.DataxTask.buildDataxJsonFile(DataxTask.java:210)
at org.apache.dolphinscheduler.plugin.task.datax.DataxTask.handle(DataxTask.java:161)
at org.apache.dolphinscheduler.server.worker.runner.TaskExecuteThread.run(TaskExecuteThread.java:226)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.Throwable: Code: 41, e.displayText() = DB::ParsingException: Cannot parse datetime: Cannot parse UInt32 from String: While processing (paymentType != 'yb') AND (datatime >= (toUnixTimestamp(concat('${dtdh}', ':00:00')) * 1000)) AND (datatime < (toUnixTimestamp(concat('2022-01-17 19', ':59:59')) * 1000)) (version 21.6.6.51 (official build))
at ru.yandex.clickhouse.except.ClickHouseExceptionSpecifier.specify(ClickHouseExceptionSpecifier.java:53)
... 19 common frames omitted
### How to reproduce
Create a ClickHouse DataX task and use a time parameter in the "WHERE" filter.
### Anything else
I have fixed this bug.
Please edit DataxUtils.class and update the getSqlStatementParser function:
```java
public static SQLStatementParser getSqlStatementParser(DbType dbType, String sql) {
    switch (dbType) {
        case MYSQL:
            return new MySqlStatementParser(sql);
        case POSTGRESQL:
            return new PGSQLStatementParser(sql);
        case ORACLE:
            return new OracleStatementParser(sql);
        case SQLSERVER:
            return new SQLServerStatementParser(sql);
        case CLICKHOUSE:
            return new ClickhouseStatementParser(sql);
        default:
            return null;
    }
}
```
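For reference, a quick smoke check of the new branch (a sketch; it assumes Druid's ClickHouse parser, com.alibaba.druid.sql.dialect.clickhouse.parser.ClickhouseStatementParser, is available on the classpath):

```java
import org.apache.dolphinscheduler.plugin.task.datax.DataxUtils;
import org.apache.dolphinscheduler.spi.enums.DbType;

import com.alibaba.druid.sql.parser.SQLStatementParser;

// Hypothetical smoke test for the added CLICKHOUSE branch: before the fix,
// getSqlStatementParser returned null for ClickHouse, which forced DataxTask
// to resolve column names by executing the SQL -- failing on ${...} params.
public class ClickhouseParserCheck {
    public static void main(String[] args) {
        SQLStatementParser parser = DataxUtils.getSqlStatementParser(
                DbType.CLICKHOUSE, "SELECT id, name FROM t WHERE dt >= '2022-01-17'");
        System.out.println(parser != null ? "parser available" : "unsupported");
    }
}
```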
### Version
2.0.2
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8092 | https://github.com/apache/dolphinscheduler/pull/8091 | e0a99dd9093dacd161512535b0046699f3cf26af | f1a02159783e2a01094a06eb7681d4532cdbbff4 | "2022-01-17T13:16:14Z" | java | "2022-01-18T08:53:25Z" | dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.datax;
import org.apache.dolphinscheduler.spi.enums.DbType;
import com.alibaba.druid.sql.dialect.mysql.parser.MySqlStatementParser;
import com.alibaba.druid.sql.dialect.oracle.parser.OracleStatementParser;
import com.alibaba.druid.sql.dialect.postgresql.parser.PGSQLStatementParser;
import com.alibaba.druid.sql.dialect.sqlserver.parser.SQLServerStatementParser;
import com.alibaba.druid.sql.parser.SQLStatementParser;
public class DataxUtils {
public static final String DATAX_READER_PLUGIN_MYSQL = "mysqlreader";
public static final String DATAX_READER_PLUGIN_POSTGRESQL = "postgresqlreader";
public static final String DATAX_READER_PLUGIN_ORACLE = "oraclereader";
public static final String DATAX_READER_PLUGIN_SQLSERVER = "sqlserverreader";
public static final String DATAX_READER_PLUGIN_CLICKHOUSE = "clickhousereader";
public static final String DATAX_WRITER_PLUGIN_MYSQL = "mysqlwriter";
public static final String DATAX_WRITER_PLUGIN_POSTGRESQL = "postgresqlwriter";
public static final String DATAX_WRITER_PLUGIN_ORACLE = "oraclewriter";
public static final String DATAX_WRITER_PLUGIN_SQLSERVER = "sqlserverwriter";
public static final String DATAX_WRITER_PLUGIN_CLICKHOUSE = "clickhousewriter";
public static String getReaderPluginName(DbType dbType) {
switch (dbType) {
case MYSQL:
return DATAX_READER_PLUGIN_MYSQL;
case POSTGRESQL:
return DATAX_READER_PLUGIN_POSTGRESQL;
case ORACLE:
return DATAX_READER_PLUGIN_ORACLE;
case SQLSERVER:
return DATAX_READER_PLUGIN_SQLSERVER;
case CLICKHOUSE:
return DATAX_READER_PLUGIN_CLICKHOUSE;
default:
return null;
}
}
public static String getWriterPluginName(DbType dbType) {
switch (dbType) {
case MYSQL:
return DATAX_WRITER_PLUGIN_MYSQL;
case POSTGRESQL:
return DATAX_WRITER_PLUGIN_POSTGRESQL;
case ORACLE:
return DATAX_WRITER_PLUGIN_ORACLE;
case SQLSERVER:
return DATAX_WRITER_PLUGIN_SQLSERVER;
case CLICKHOUSE:
return DATAX_WRITER_PLUGIN_CLICKHOUSE;
default:
return null;
}
}
public static SQLStatementParser getSqlStatementParser(DbType dbType, String sql) {
switch (dbType) {
case MYSQL:
return new MySqlStatementParser(sql);
case POSTGRESQL:
return new PGSQLStatementParser(sql);
case ORACLE:
return new OracleStatementParser(sql);
case SQLSERVER:
return new SQLServerStatementParser(sql);
default:
return null;
}
}
public static String[] convertKeywordsColumns(DbType dbType, String[] columns) {
if (columns == null) {
return null;
}
String[] toColumns = new String[columns.length];
for (int i = 0; i < columns.length; i++) {
toColumns[i] = doConvertKeywordsColumn(dbType, columns[i]);
}
return toColumns;
}
public static String doConvertKeywordsColumn(DbType dbType, String column) {
if (column == null) {
return column;
}
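        // strip any existing quoting (backticks, double or single quotes) before re-quoting for the target dialect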
column = column.trim();
column = column.replace("`", "");
column = column.replace("\"", "");
column = column.replace("'", "");
switch (dbType) {
case MYSQL:
return String.format("`%s`", column);
case POSTGRESQL:
return String.format("\"%s\"", column);
case ORACLE:
return String.format("\"%s\"", column);
case SQLSERVER:
return String.format("`%s`", column);
default:
return column;
}
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,113 | [Feature][UI Next] Feature graph component. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
_No response_
### Use case
_No response_
### Related issues
[#7332](https://github.com/apache/dolphinscheduler/issues/7332)
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8113 | https://github.com/apache/dolphinscheduler/pull/8114 | 66bc76cedad0b698d53ef1968fac7a5b80f7b83f | d6e1d955117a74ee487e1d14337ab7a7d9c18e73 | "2022-01-19T03:04:31Z" | java | "2022-01-19T03:27:25Z" | dolphinscheduler-ui-next/src/components/chart/modules/Graph.tsx | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,098 | [Bug] [UI] udf sub resource manage edit modal can't close | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
When I renamed a UDF sub-resource and clicked the modal's Cancel button, it did not respond, as shown below:
![image](https://user-images.githubusercontent.com/8847400/149877229-f8971af8-1a24-4e05-9cce-6fc726512bce.png)
### What you expected to happen
Modal close.
### How to reproduce
Open `UDF manage` -> `resource manage`, click a resource to enter its list of sub-resources, click the edit action on a sub-resource, and then click the cancel button.
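From the component source included below, one likely cause: the popup's cancel handler only flips a local `renameDialog` flag that nothing in this file reads, so the parent dialog never learns it should hide; notifying the parent (for example by emitting a close event it listens to) is the usual pattern for this kind of modal.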
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8098 | https://github.com/apache/dolphinscheduler/pull/8107 | 98ae33112c855255a2fdd3407b7defe48fa65b77 | a77b0ef93bb2348b971781ded27758061f783696 | "2022-01-18T05:41:57Z" | java | "2022-01-20T02:19:02Z" | dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/subUdfDirectory/_source/rename.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<m-popup :ok-text="$t('Rename')" :nameText="$t('Rename')" @ok="_ok" @close="close" :asyn-loading="true">
<template slot="content">
<div class="resource-rename-model">
<m-list-box-f>
<template slot="name"><strong>*</strong>{{$t('Name')}}</template>
<template slot="content">
<el-input
type="input"
v-model="name"
maxlength="60"
size="small"
:placeholder="$t('Please enter name')">
</el-input>
</template>
</m-list-box-f>
<m-list-box-f>
<template slot="name">{{$t('Description')}}</template>
<template slot="content">
<el-input
type="textarea"
v-model="description"
size="small"
:placeholder="$t('Please enter description')">
</el-input>
</template>
</m-list-box-f>
</div>
</template>
</m-popup>
</template>
<script>
import i18n from '@/module/i18n'
import store from '@/conf/home/store'
import localStore from '@/module/util/localStorage'
import mPopup from '@/module/components/popup/popup'
import mListBoxF from '@/module/components/listBoxF/listBoxF'
export default {
name: 'resource-udf-rename',
data () {
return {
store,
description: '',
name: ''
}
},
props: {
item: Object
},
methods: {
_ok (fn) {
this._verification().then(res => {
if (this.name === this.item.alias) {
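        // name unchanged: only treat this as an update when the description actually changed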
return new Promise((resolve, reject) => {
            this.description === this.item.description ? reject({ msg: 'Content not modified' }) : resolve()
})
} else {
return this.store.dispatch('resource/resourceVerifyName', {
fullName: localStore.getItem('currentDir') + '/' + this.name,
type: 'UDF'
})
}
}).then(res => {
return this.store.dispatch('resource/resourceRename', {
name: this.name,
description: this.description,
id: this.item.id,
type: 'UDF'
})
}).then(res => {
this.$message.success(res.msg)
this.$emit('onUpDate', res.data)
fn()
}).catch(e => {
fn()
this.$message.error(e.msg || '')
})
},
_verification () {
return new Promise((resolve, reject) => {
if (!this.name) {
reject({ // eslint-disable-line
msg: `${i18n.$t('Please enter resource name')}`
})
} else {
resolve()
}
})
},
close () {
this.renameDialog = false
}
},
watch: {},
created () {
let item = this.item || {}
if (item) {
this.name = item.alias
this.description = item.description
}
},
mounted () {
},
components: { mPopup, mListBoxF }
}
</script>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,119 | [Bug] [API] Update ProcessInstance error and without reason output | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![](https://ftp.bmp.ovh/imgs/2022/01/eca77fe88da36409.jpg)
Updating the process instance fails, and no reason for the error is output.
### What you expected to happen
Updating the process instance should succeed, or at least the reason for the failure should be reported.
### How to reproduce
Create a new task on the process instance edit page and link it to another task.
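A minimal sketch of the direction a fix could take (assuming the result-map convention used by the API services; the helper name and keys are illustrative):

```java
import java.util.HashMap;
import java.util.Map;

// Hypothetical helper: attach the underlying cause to the result map so the
// UI can show the actual reason instead of a silent, generic update failure.
public class FailureResult {
    public static Map<String, Object> of(String statusCode, Throwable cause) {
        Map<String, Object> result = new HashMap<>();
        result.put("status", statusCode);
        result.put("msg", cause == null ? statusCode : cause.getMessage());
        return result;
    }
}
```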
### Version
2.0.2
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8119 | https://github.com/apache/dolphinscheduler/pull/8122 | 38b40e7ac49390164207104a4a788639b12696c4 | b5631d5281e84b72afa44a3d37631ffdbfd178ca | "2022-01-19T07:32:41Z" | java | "2022-01-21T01:32:38Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.ProcessExecutionTypeEnum;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.dao.entity.User;
import java.util.Map;
import javax.servlet.http.HttpServletResponse;
import org.springframework.web.multipart.MultipartFile;
/**
* process definition service
*/
public interface ProcessDefinitionService {
/**
* create process definition
*
* @param loginUser login user
* @param projectCode project code
* @param name process definition name
* @param description description
* @param globalParams global params
* @param locations locations for nodes
* @param timeout timeout
* @param tenantCode tenantCode
* @param taskRelationJson relation json for nodes
* @param taskDefinitionJson taskDefinitionJson
* @return create result code
*/
Map<String, Object> createProcessDefinition(User loginUser,
long projectCode,
String name,
String description,
String globalParams,
String locations,
int timeout,
String tenantCode,
String taskRelationJson,
String taskDefinitionJson,
ProcessExecutionTypeEnum executionType);
/**
* query process definition list
*
* @param loginUser login user
* @param projectCode project code
* @return definition list
*/
Map<String, Object> queryProcessDefinitionList(User loginUser,
long projectCode);
/**
* query process definition simple list
*
* @param loginUser login user
* @param projectCode project code
* @return definition simple list
*/
Map<String, Object> queryProcessDefinitionSimpleList(User loginUser,
long projectCode);
/**
* query process definition list paging
*
* @param loginUser login user
* @param projectCode project code
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @param userId user id
* @return process definition page
*/
Result queryProcessDefinitionListPaging(User loginUser,
long projectCode,
String searchVal,
Integer userId,
Integer pageNo,
Integer pageSize);
/**
* query detail of process definition
*
* @param loginUser login user
* @param projectCode project code
* @param code process definition code
* @return process definition detail
*/
Map<String, Object> queryProcessDefinitionByCode(User loginUser,
long projectCode,
long code);
/**
* query detail of process definition
*
* @param loginUser login user
* @param projectCode project code
* @param name process definition name
* @return process definition detail
*/
Map<String, Object> queryProcessDefinitionByName(User loginUser,
long projectCode,
String name);
/**
* batch copy process definition
*
* @param loginUser loginUser
* @param projectCode projectCode
* @param codes processDefinitionCodes
* @param targetProjectCode targetProjectCode
*/
Map<String, Object> batchCopyProcessDefinition(User loginUser,
long projectCode,
String codes,
long targetProjectCode);
/**
* batch move process definition
*
* @param loginUser loginUser
* @param projectCode projectCode
* @param codes processDefinitionCodes
* @param targetProjectCode targetProjectCode
*/
Map<String, Object> batchMoveProcessDefinition(User loginUser,
long projectCode,
String codes,
long targetProjectCode);
/**
* update process definition
*
* @param loginUser login user
* @param projectCode project code
* @param name process definition name
* @param code process definition code
* @param description description
* @param globalParams global params
* @param locations locations for nodes
* @param timeout timeout
* @param tenantCode tenantCode
* @param taskRelationJson relation json for nodes
* @param taskDefinitionJson taskDefinitionJson
* @return update result code
*/
Map<String, Object> updateProcessDefinition(User loginUser,
long projectCode,
String name,
long code,
String description,
String globalParams,
String locations,
int timeout,
String tenantCode,
String taskRelationJson,
String taskDefinitionJson,
ProcessExecutionTypeEnum executionType);
/**
* verify process definition name unique
*
* @param loginUser login user
* @param projectCode project code
* @param name name
* @return true if process definition name not exists, otherwise false
*/
Map<String, Object> verifyProcessDefinitionName(User loginUser,
long projectCode,
String name);
/**
* delete process definition by code
*
* @param loginUser login user
* @param projectCode project code
* @param code process definition code
* @return delete result code
*/
Map<String, Object> deleteProcessDefinitionByCode(User loginUser,
long projectCode,
long code);
/**
* release process definition: online / offline
*
* @param loginUser login user
* @param projectCode project code
* @param code process definition code
* @param releaseState release state
* @return release result code
*/
Map<String, Object> releaseProcessDefinition(User loginUser,
long projectCode,
long code,
ReleaseState releaseState);
/**
* batch export process definition by codes
*
* @param loginUser login user
* @param projectCode project code
* @param codes process definition codes
* @param response http servlet response
*/
void batchExportProcessDefinitionByCodes(User loginUser,
long projectCode,
String codes,
HttpServletResponse response);
/**
* import process definition
*
* @param loginUser login user
* @param projectCode project code
* @param file process metadata json file
* @return import process
*/
Map<String, Object> importProcessDefinition(User loginUser,
long projectCode,
MultipartFile file);
/**
* import sql process definition
*
* @param loginUser login user
* @param projectCode project code
* @param file sql file, zip
* @return import process
*/
Map<String, Object> importSqlProcessDefinition(User loginUser,
long projectCode,
MultipartFile file);
/**
* check the process task relation json
*
* @param processTaskRelationJson process task relation json
* @return check result code
*/
Map<String, Object> checkProcessNodeList(String processTaskRelationJson);
/**
* get task node details based on process definition
*
* @param loginUser loginUser
* @param projectCode project code
* @param code processDefinition code
* @return task node list
*/
Map<String, Object> getTaskNodeListByDefinitionCode(User loginUser,
long projectCode,
long code);
/**
* get task node details map based on process definition
*
* @param loginUser loginUser
* @param projectCode project code
* @param codes define code list
* @return task node list
*/
Map<String, Object> getNodeListMapByDefinitionCodes(User loginUser,
long projectCode,
String codes);
/**
* query process definition all by project code
*
* @param projectCode project code
* @return process definitions in the project
*/
Map<String, Object> queryAllProcessDefinitionByProjectCode(User loginUser, long projectCode);
/**
* Encapsulates the TreeView structure
*
* @param projectCode project code
* @param code process definition code
* @param limit limit
* @return tree view json data
*/
Map<String, Object> viewTree(long projectCode, long code, Integer limit);
/**
* switch the defined process definition version
*
* @param loginUser login user
* @param projectCode project code
* @param code process definition code
* @param version the version user want to switch
* @return switch process definition version result code
*/
Map<String, Object> switchProcessDefinitionVersion(User loginUser,
long projectCode,
long code,
int version);
/**
* query the pagination versions info by one certain process definition code
*
* @param loginUser login user info to check auth
* @param projectCode project code
* @param pageNo page number
* @param pageSize page size
* @param code process definition code
* @return the pagination process definition versions info of the certain process definition
*/
Result queryProcessDefinitionVersions(User loginUser,
long projectCode,
int pageNo,
int pageSize,
long code);
/**
* delete one certain process definition by version number and process definition code
*
* @param loginUser login user info to check auth
* @param projectCode project code
* @param code process definition code
* @param version version number
     * @return delete result code
*/
Map<String, Object> deleteProcessDefinitionVersion(User loginUser,
long projectCode,
long code,
int version);
/**
* create empty process definition
*
* @param loginUser login user
* @param projectCode project code
* @param name process definition name
* @param description description
* @param globalParams globalParams
* @param timeout timeout
* @param tenantCode tenantCode
* @param scheduleJson scheduleJson
* @return process definition code
*/
Map<String, Object> createEmptyProcessDefinition(User loginUser,
long projectCode,
String name,
String description,
String globalParams,
int timeout,
String tenantCode,
String scheduleJson,
ProcessExecutionTypeEnum executionType);
/**
* update process definition basic info
*
* @param loginUser login user
* @param projectCode project code
* @param name process definition name
* @param code process definition code
* @param description description
* @param globalParams globalParams
* @param timeout timeout
* @param tenantCode tenantCode
* @param scheduleJson scheduleJson
* @param executionType executionType
* @return update result code
*/
Map<String, Object> updateProcessDefinitionBasicInfo(User loginUser,
long projectCode,
String name,
long code,
String description,
String globalParams,
int timeout,
String tenantCode,
String scheduleJson,
ProcessExecutionTypeEnum executionType);
/**
* release process definition and schedule
*
* @param loginUser login user
* @param projectCode project code
* @param code process definition code
* @param releaseState releaseState
* @return update result code
*/
Map<String, Object> releaseWorkflowAndSchedule(User loginUser,
long projectCode,
long code,
ReleaseState releaseState);
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,119 | [Bug] [API] Update ProcessInstance error and without reason output | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![](https://ftp.bmp.ovh/imgs/2022/01/eca77fe88da36409.jpg)
Update ProcessInstance error.And there is no cause for error.
### What you expected to happen
Update ProcessInstance error
### How to reproduce
create a new task in process instance edit page and link to another task.
### Version
2.0.2
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8119 | https://github.com/apache/dolphinscheduler/pull/8122 | 38b40e7ac49390164207104a4a788639b12696c4 | b5631d5281e84b72afa44a3d37631ffdbfd178ca | "2022-01-19T07:32:41Z" | java | "2022-01-21T01:32:38Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_CODE;
import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP;
import org.apache.dolphinscheduler.api.dto.DagDataSchedule;
import org.apache.dolphinscheduler.api.dto.ScheduleParam;
import org.apache.dolphinscheduler.api.dto.treeview.Instance;
import org.apache.dolphinscheduler.api.dto.treeview.TreeViewDto;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.service.ProcessInstanceService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.service.SchedulerService;
import org.apache.dolphinscheduler.api.utils.CheckUtils;
import org.apache.dolphinscheduler.api.utils.FileUtils;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ConditionType;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ProcessExecutionTypeEnum;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.enums.TimeoutFlag;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.task.sql.SqlParameters;
import org.apache.dolphinscheduler.common.task.sql.SqlType;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils;
import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils.CodeGenerateException;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.entity.DagData;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionLog;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelationLog;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.TaskDefinition;
import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionLogMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationLogMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionLogMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.Lists;
/**
* process definition service impl
*/
@Service
public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements ProcessDefinitionService {
private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionServiceImpl.class);
private static final String RELEASESTATE = "releaseState";
@Autowired
private ProjectMapper projectMapper;
@Autowired
private ProjectService projectService;
@Autowired
private UserMapper userMapper;
@Autowired
private ProcessDefinitionLogMapper processDefinitionLogMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
@Autowired
private ProcessInstanceService processInstanceService;
@Autowired
private TaskInstanceMapper taskInstanceMapper;
@Autowired
private ScheduleMapper scheduleMapper;
@Autowired
private ProcessService processService;
@Autowired
private ProcessTaskRelationMapper processTaskRelationMapper;
@Autowired
private ProcessTaskRelationLogMapper processTaskRelationLogMapper;
@Autowired
TaskDefinitionLogMapper taskDefinitionLogMapper;
@Autowired
private TaskDefinitionMapper taskDefinitionMapper;
@Autowired
private SchedulerService schedulerService;
@Autowired
private TenantMapper tenantMapper;
@Autowired
private DataSourceMapper dataSourceMapper;
/**
* create process definition
*
* @param loginUser login user
* @param projectCode project code
* @param name process definition name
* @param description description
* @param globalParams global params
* @param locations locations for nodes
* @param timeout timeout
* @param tenantCode tenantCode
* @param taskRelationJson relation json for nodes
* @param taskDefinitionJson taskDefinitionJson
* @return create result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> createProcessDefinition(User loginUser,
long projectCode,
String name,
String description,
String globalParams,
String locations,
int timeout,
String tenantCode,
String taskRelationJson,
String taskDefinitionJson,
ProcessExecutionTypeEnum executionType) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
// check whether the new process define name exist
ProcessDefinition definition = processDefinitionMapper.verifyByDefineName(project.getCode(), name);
if (definition != null) {
putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name);
return result;
}
List<TaskDefinitionLog> taskDefinitionLogs = JSONUtils.toList(taskDefinitionJson, TaskDefinitionLog.class);
Map<String, Object> checkTaskDefinitions = checkTaskDefinitionList(taskDefinitionLogs, taskDefinitionJson);
if (checkTaskDefinitions.get(Constants.STATUS) != Status.SUCCESS) {
return checkTaskDefinitions;
}
List<ProcessTaskRelationLog> taskRelationList = JSONUtils.toList(taskRelationJson, ProcessTaskRelationLog.class);
Map<String, Object> checkRelationJson = checkTaskRelationList(taskRelationList, taskRelationJson, taskDefinitionLogs);
if (checkRelationJson.get(Constants.STATUS) != Status.SUCCESS) {
return checkRelationJson;
}
int tenantId = -1;
if (!Constants.DEFAULT.equals(tenantCode)) {
Tenant tenant = tenantMapper.queryByTenantCode(tenantCode);
if (tenant == null) {
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
tenantId = tenant.getId();
}
long processDefinitionCode;
try {
processDefinitionCode = CodeGenerateUtils.getInstance().genCode();
} catch (CodeGenerateException e) {
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS);
return result;
}
ProcessDefinition processDefinition = new ProcessDefinition(projectCode, name, processDefinitionCode, description,
globalParams, locations, timeout, loginUser.getId(), tenantId);
processDefinition.setExecutionType(executionType);
return createDagDefine(loginUser, taskRelationList, processDefinition, taskDefinitionLogs);
}
private Map<String, Object> createDagDefine(User loginUser,
List<ProcessTaskRelationLog> taskRelationList,
ProcessDefinition processDefinition,
List<TaskDefinitionLog> taskDefinitionLogs) {
Map<String, Object> result = new HashMap<>();
int saveTaskResult = processService.saveTaskDefine(loginUser, processDefinition.getProjectCode(), taskDefinitionLogs, Boolean.TRUE);
if (saveTaskResult == Constants.EXIT_CODE_SUCCESS) {
logger.info("The task has not changed, so skip");
}
if (saveTaskResult == Constants.DEFINITION_FAILURE) {
putMsg(result, Status.CREATE_TASK_DEFINITION_ERROR);
throw new ServiceException(Status.CREATE_TASK_DEFINITION_ERROR);
}
int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE);
if (insertVersion == 0) {
putMsg(result, Status.CREATE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.CREATE_PROCESS_DEFINITION_ERROR);
}
int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(), processDefinition.getCode(),
insertVersion, taskRelationList, taskDefinitionLogs, Boolean.TRUE);
if (insertResult == Constants.EXIT_CODE_SUCCESS) {
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefinition);
} else {
putMsg(result, Status.CREATE_PROCESS_TASK_RELATION_ERROR);
throw new ServiceException(Status.CREATE_PROCESS_TASK_RELATION_ERROR);
}
return result;
}
private Map<String, Object> checkTaskDefinitionList(List<TaskDefinitionLog> taskDefinitionLogs, String taskDefinitionJson) {
Map<String, Object> result = new HashMap<>();
try {
if (taskDefinitionLogs.isEmpty()) {
logger.error("taskDefinitionJson invalid: {}", taskDefinitionJson);
putMsg(result, Status.DATA_IS_NOT_VALID, taskDefinitionJson);
return result;
}
for (TaskDefinitionLog taskDefinitionLog : taskDefinitionLogs) {
if (!CheckUtils.checkTaskDefinitionParameters(taskDefinitionLog)) {
logger.error("task definition {} parameter invalid", taskDefinitionLog.getName());
putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinitionLog.getName());
return result;
}
}
putMsg(result, Status.SUCCESS);
} catch (Exception e) {
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
result.put(Constants.MSG, e.getMessage());
}
return result;
}
private Map<String, Object> checkTaskRelationList(List<ProcessTaskRelationLog> taskRelationList, String taskRelationJson, List<TaskDefinitionLog> taskDefinitionLogs) {
Map<String, Object> result = new HashMap<>();
try {
if (taskRelationList == null || taskRelationList.isEmpty()) {
logger.error("task relation list is null");
putMsg(result, Status.DATA_IS_NOT_VALID, taskRelationJson);
return result;
}
List<ProcessTaskRelation> processTaskRelations = taskRelationList.stream()
.map(processTaskRelationLog -> JSONUtils.parseObject(JSONUtils.toJsonString(processTaskRelationLog), ProcessTaskRelation.class))
.collect(Collectors.toList());
List<TaskNode> taskNodeList = processService.transformTask(processTaskRelations, taskDefinitionLogs);
if (taskNodeList.size() != taskRelationList.size()) {
Set<Long> postTaskCodes = taskRelationList.stream().map(ProcessTaskRelationLog::getPostTaskCode).collect(Collectors.toSet());
Set<Long> taskNodeCodes = taskNodeList.stream().map(TaskNode::getCode).collect(Collectors.toSet());
Collection<Long> codes = CollectionUtils.subtract(postTaskCodes, taskNodeCodes);
if (CollectionUtils.isNotEmpty(codes)) {
logger.error("the task code is not exist");
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, org.apache.commons.lang.StringUtils.join(codes, Constants.COMMA));
return result;
}
}
if (graphHasCycle(taskNodeList)) {
logger.error("process DAG has cycle");
putMsg(result, Status.PROCESS_NODE_HAS_CYCLE);
return result;
}
// check whether the task relation json is normal
for (ProcessTaskRelationLog processTaskRelationLog : taskRelationList) {
if (processTaskRelationLog.getPostTaskCode() == 0) {
logger.error("the post_task_code or post_task_version can't be zero");
putMsg(result, Status.CHECK_PROCESS_TASK_RELATION_ERROR);
return result;
}
}
putMsg(result, Status.SUCCESS);
} catch (Exception e) {
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
result.put(Constants.MSG, e.getMessage());
}
return result;
}
/**
* query process definition list
*
* @param loginUser login user
* @param projectCode project code
* @return definition list
*/
@Override
public Map<String, Object> queryProcessDefinitionList(User loginUser, long projectCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
List<ProcessDefinition> resourceList = processDefinitionMapper.queryAllDefinitionList(projectCode);
List<DagData> dagDataList = resourceList.stream().map(processService::genDagData).collect(Collectors.toList());
result.put(Constants.DATA_LIST, dagDataList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query process definition simple list
*
* @param loginUser login user
* @param projectCode project code
* @return definition simple list
*/
@Override
public Map<String, Object> queryProcessDefinitionSimpleList(User loginUser, long projectCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
List<ProcessDefinition> processDefinitions = processDefinitionMapper.queryAllDefinitionList(projectCode);
ArrayNode arrayNode = JSONUtils.createArrayNode();
for (ProcessDefinition processDefinition : processDefinitions) {
ObjectNode processDefinitionNode = JSONUtils.createObjectNode();
processDefinitionNode.put("id", processDefinition.getId());
processDefinitionNode.put("code", processDefinition.getCode());
processDefinitionNode.put("name", processDefinition.getName());
processDefinitionNode.put("projectCode", processDefinition.getProjectCode());
arrayNode.add(processDefinitionNode);
}
result.put(Constants.DATA_LIST, arrayNode);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query process definition list paging
*
* @param loginUser login user
* @param projectCode project code
* @param searchVal search value
* @param userId user id
* @param pageNo page number
* @param pageSize page size
* @return process definition page
*/
@Override
public Result queryProcessDefinitionListPaging(User loginUser, long projectCode, String searchVal, Integer userId, Integer pageNo, Integer pageSize) {
Result result = new Result();
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectCode);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
putMsg(result, resultStatus);
return result;
}
Page<ProcessDefinition> page = new Page<>(pageNo, pageSize);
IPage<ProcessDefinition> processDefinitionIPage = processDefinitionMapper.queryDefineListPaging(
page, searchVal, userId, project.getCode(), isAdmin(loginUser));
List<ProcessDefinition> records = processDefinitionIPage.getRecords();
for (ProcessDefinition pd : records) {
ProcessDefinitionLog processDefinitionLog = processDefinitionLogMapper.queryByDefinitionCodeAndVersion(pd.getCode(), pd.getVersion());
User user = userMapper.selectById(processDefinitionLog.getOperator());
pd.setModifyBy(user.getUserName());
}
processDefinitionIPage.setRecords(records);
PageInfo<ProcessDefinition> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotal((int) processDefinitionIPage.getTotal());
pageInfo.setTotalList(processDefinitionIPage.getRecords());
result.setData(pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query detail of process definition
*
* @param loginUser login user
* @param projectCode project code
* @param code process definition code
* @return process definition detail
*/
@Override
public Map<String, Object> queryProcessDefinitionByCode(User loginUser, long projectCode, long code) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
} else {
Tenant tenant = tenantMapper.queryById(processDefinition.getTenantId());
if (tenant != null) {
processDefinition.setTenantCode(tenant.getTenantCode());
}
DagData dagData = processService.genDagData(processDefinition);
result.put(Constants.DATA_LIST, dagData);
putMsg(result, Status.SUCCESS);
}
return result;
}
@Override
public Map<String, Object> queryProcessDefinitionByName(User loginUser, long projectCode, String name) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByDefineName(projectCode, name);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, name);
} else {
DagData dagData = processService.genDagData(processDefinition);
result.put(Constants.DATA_LIST, dagData);
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* update process definition
*
* @param loginUser login user
* @param projectCode project code
* @param name process definition name
* @param code process definition code
* @param description description
* @param globalParams global params
* @param locations locations for nodes
* @param timeout timeout
* @param tenantCode tenantCode
* @param taskRelationJson relation json for nodes
* @param taskDefinitionJson taskDefinitionJson
* @return update result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> updateProcessDefinition(User loginUser,
long projectCode,
String name,
long code,
String description,
String globalParams,
String locations,
int timeout,
String tenantCode,
String taskRelationJson,
String taskDefinitionJson,
ProcessExecutionTypeEnum executionType) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
List<TaskDefinitionLog> taskDefinitionLogs = JSONUtils.toList(taskDefinitionJson, TaskDefinitionLog.class);
Map<String, Object> checkTaskDefinitions = checkTaskDefinitionList(taskDefinitionLogs, taskDefinitionJson);
if (checkTaskDefinitions.get(Constants.STATUS) != Status.SUCCESS) {
return checkTaskDefinitions;
}
List<ProcessTaskRelationLog> taskRelationList = JSONUtils.toList(taskRelationJson, ProcessTaskRelationLog.class);
Map<String, Object> checkRelationJson = checkTaskRelationList(taskRelationList, taskRelationJson, taskDefinitionLogs);
if (checkRelationJson.get(Constants.STATUS) != Status.SUCCESS) {
return checkRelationJson;
}
int tenantId = -1;
if (!Constants.DEFAULT.equals(tenantCode)) {
Tenant tenant = tenantMapper.queryByTenantCode(tenantCode);
if (tenant == null) {
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
tenantId = tenant.getId();
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
// check process definition exists
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
return result;
}
if (processDefinition.getReleaseState() == ReleaseState.ONLINE) {
// online can not permit edit
putMsg(result, Status.PROCESS_DEFINE_NOT_ALLOWED_EDIT, processDefinition.getName());
return result;
}
if (!name.equals(processDefinition.getName())) {
// check whether the new process define name exist
ProcessDefinition definition = processDefinitionMapper.verifyByDefineName(project.getCode(), name);
if (definition != null) {
putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name);
return result;
}
}
ProcessDefinition processDefinitionDeepCopy = JSONUtils.parseObject(JSONUtils.toJsonString(processDefinition), ProcessDefinition.class);
processDefinition.set(projectCode, name, description, globalParams, locations, timeout, tenantId);
processDefinition.setExecutionType(executionType);
return updateDagDefine(loginUser, taskRelationList, processDefinition, processDefinitionDeepCopy, taskDefinitionLogs);
}
private Map<String, Object> updateDagDefine(User loginUser,
List<ProcessTaskRelationLog> taskRelationList,
ProcessDefinition processDefinition,
ProcessDefinition processDefinitionDeepCopy,
List<TaskDefinitionLog> taskDefinitionLogs) {
Map<String, Object> result = new HashMap<>();
int saveTaskResult = processService.saveTaskDefine(loginUser, processDefinition.getProjectCode(), taskDefinitionLogs, Boolean.TRUE);
if (saveTaskResult == Constants.EXIT_CODE_SUCCESS) {
logger.info("The task has not changed, so skip");
}
if (saveTaskResult == Constants.DEFINITION_FAILURE) {
putMsg(result, Status.UPDATE_TASK_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR);
}
int insertVersion;
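// if nothing about the definition changed, reuse the current version and only refresh the operator info; otherwise persist a new version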
if (processDefinition.equals(processDefinitionDeepCopy)) {
insertVersion = processDefinitionDeepCopy.getVersion();
ProcessDefinitionLog processDefinitionLog = processDefinitionLogMapper.queryByDefinitionCodeAndVersion(processDefinition.getCode(), insertVersion);
processDefinitionLog.setOperator(loginUser.getId());
processDefinitionLog.setOperateTime(new Date());
processDefinitionLogMapper.updateById(processDefinitionLog);
} else {
processDefinition.setUpdateTime(new Date());
insertVersion = processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE);
}
if (insertVersion == 0) {
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR);
}
int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(),
processDefinition.getCode(), insertVersion, taskRelationList, taskDefinitionLogs, Boolean.TRUE);
if (insertResult == Constants.EXIT_CODE_SUCCESS) {
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefinition);
} else {
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR);
}
return result;
}
/**
* verify process definition name unique
*
* @param loginUser login user
* @param projectCode project code
* @param name name
* @return true if process definition name not exists, otherwise false
*/
@Override
public Map<String, Object> verifyProcessDefinitionName(User loginUser, long projectCode, String name) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.verifyByDefineName(project.getCode(), name.trim());
if (processDefinition == null) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name.trim());
}
return result;
}
/**
* delete process definition by code
*
* @param loginUser login user
* @param projectCode project code
* @param code process definition code
* @return delete result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> deleteProcessDefinitionByCode(User loginUser, long projectCode, long code) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
return result;
}
// Determine if the login user is the owner of the process definition
if (loginUser.getId() != processDefinition.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
// check process definition is already online
if (processDefinition.getReleaseState() == ReleaseState.ONLINE) {
putMsg(result, Status.PROCESS_DEFINE_STATE_ONLINE, code);
return result;
}
// check process instances is already running
List<ProcessInstance> processInstances = processInstanceService.queryByProcessDefineCodeAndStatus(processDefinition.getCode(), Constants.NOT_TERMINATED_STATES);
if (CollectionUtils.isNotEmpty(processInstances)) {
putMsg(result, Status.DELETE_PROCESS_DEFINITION_BY_CODE_FAIL, processInstances.size());
return result;
}
// get the timing according to the process definition
Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(code);
if (scheduleObj != null) {
if (scheduleObj.getReleaseState() == ReleaseState.OFFLINE) {
int delete = scheduleMapper.deleteById(scheduleObj.getId());
if (delete == 0) {
putMsg(result, Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR);
throw new ServiceException(Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR);
}
}
if (scheduleObj.getReleaseState() == ReleaseState.ONLINE) {
putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, scheduleObj.getId());
return result;
}
}
int delete = processDefinitionMapper.deleteById(processDefinition.getId());
if (delete == 0) {
putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR);
throw new ServiceException(Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR);
}
int deleteRelation = processTaskRelationMapper.deleteByCode(project.getCode(), processDefinition.getCode());
if (deleteRelation == 0) {
logger.warn("The process definition has not relation, it will be delete successfully");
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* release process definition: online / offline
*
* @param loginUser login user
* @param projectCode project code
* @param code process definition code
* @param releaseState release state
* @return release result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> releaseProcessDefinition(User loginUser, long projectCode, long code, ReleaseState releaseState) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
// check state
if (null == releaseState) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE);
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
return result;
}
switch (releaseState) {
case ONLINE:
List<ProcessTaskRelation> relationList = processService.findRelationByCode(code, processDefinition.getVersion());
if (CollectionUtils.isEmpty(relationList)) {
putMsg(result, Status.PROCESS_DAG_IS_EMPTY);
return result;
}
processDefinition.setReleaseState(releaseState);
processDefinitionMapper.updateById(processDefinition);
break;
case OFFLINE:
processDefinition.setReleaseState(releaseState);
int updateProcess = processDefinitionMapper.updateById(processDefinition);
Schedule schedule = scheduleMapper.queryByProcessDefinitionCode(code);
if (updateProcess > 0 && schedule != null) {
logger.info("set schedule offline, project code: {}, schedule id: {}, process definition code: {}", projectCode, schedule.getId(), code);
// set status
schedule.setReleaseState(releaseState);
int updateSchedule = scheduleMapper.updateById(schedule);
if (updateSchedule == 0) {
putMsg(result, Status.OFFLINE_SCHEDULE_ERROR);
throw new ServiceException(Status.OFFLINE_SCHEDULE_ERROR);
}
schedulerService.deleteSchedule(project.getId(), schedule.getId());
}
break;
default:
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE);
return result;
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* batch export process definition by codes
*/
@Override
public void batchExportProcessDefinitionByCodes(User loginUser, long projectCode, String codes, HttpServletResponse response) {
if (org.apache.commons.lang.StringUtils.isEmpty(codes)) {
return;
}
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return;
}
Set<Long> defineCodeSet = Lists.newArrayList(codes.split(Constants.COMMA)).stream().map(Long::parseLong).collect(Collectors.toSet());
List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryByCodes(defineCodeSet);
if (CollectionUtils.isEmpty(processDefinitionList)) {
return;
}
// check processDefinition exist in project
List<ProcessDefinition> processDefinitionListInProject = processDefinitionList.stream().filter(o -> projectCode == o.getProjectCode()).collect(Collectors.toList());
List<DagDataSchedule> dagDataSchedules = processDefinitionListInProject.stream().map(this::exportProcessDagData).collect(Collectors.toList());
if (CollectionUtils.isNotEmpty(dagDataSchedules)) {
downloadProcessDefinitionFile(response, dagDataSchedules);
}
}
/**
* download the process definition file
*/
private void downloadProcessDefinitionFile(HttpServletResponse response, List<DagDataSchedule> dagDataSchedules) {
response.setContentType(MediaType.APPLICATION_JSON_UTF8_VALUE);
try (ServletOutputStream out = response.getOutputStream();
BufferedOutputStream buff = new BufferedOutputStream(out)) {
buff.write(JSONUtils.toJsonString(dagDataSchedules).getBytes(StandardCharsets.UTF_8));
buff.flush();
} catch (IOException e) {
logger.warn("export process fail", e);
}
}
/**
* get export process dag data
*
* @param processDefinition process definition
* @return DagDataSchedule
*/
public DagDataSchedule exportProcessDagData(ProcessDefinition processDefinition) {
Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(processDefinition.getCode());
DagDataSchedule dagDataSchedule = new DagDataSchedule(processService.genDagData(processDefinition));
if (scheduleObj != null) {
scheduleObj.setReleaseState(ReleaseState.OFFLINE);
dagDataSchedule.setSchedule(scheduleObj);
}
return dagDataSchedule;
}
/**
* import process definition
*
* @param loginUser login user
* @param projectCode project code
* @param file process metadata json file
* @return import process
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> importProcessDefinition(User loginUser, long projectCode, MultipartFile file) {
Map<String, Object> result = new HashMap<>();
String dagDataScheduleJson = FileUtils.file2String(file);
List<DagDataSchedule> dagDataScheduleList = JSONUtils.toList(dagDataScheduleJson, DagDataSchedule.class);
//check file content
if (CollectionUtils.isEmpty(dagDataScheduleList)) {
putMsg(result, Status.DATA_IS_NULL, "fileContent");
return result;
}
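// import the DAGs one by one and stop at the first one that fails validation or persistence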
for (DagDataSchedule dagDataSchedule : dagDataScheduleList) {
if (!checkAndImport(loginUser, projectCode, result, dagDataSchedule)) {
return result;
}
}
return result;
}
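/**
 * import process definition from a zip archive of sql files
 * <p>
 * each sql file may carry metadata in "-- key: value" comment directives; a minimal sketch of
 * an entry (task name, datasource name and sql are illustrative only) looks like:
 * <pre>
 * -- name: task1
 * -- datasource: mysql_ds
 * -- upstream: root
 * select 1;
 * </pre>
 */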
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> importSqlProcessDefinition(User loginUser, long projectCode, MultipartFile file) {
Map<String, Object> result = new HashMap<>();
String processDefinitionName = file.getOriginalFilename() == null ? file.getName() : file.getOriginalFilename();
int index = processDefinitionName.lastIndexOf(".");
if (index > 0) {
processDefinitionName = processDefinitionName.substring(0, index);
}
processDefinitionName = processDefinitionName + "_import_" + DateUtils.getCurrentTimeStamp();
ProcessDefinition processDefinition;
List<TaskDefinitionLog> taskDefinitionList = new ArrayList<>();
List<ProcessTaskRelationLog> processTaskRelationList = new ArrayList<>();
// for Zip Bomb Attack
final int THRESHOLD_ENTRIES = 10000;
final int THRESHOLD_SIZE = 1000000000; // 1 GB
final double THRESHOLD_RATIO = 10;
int totalEntryArchive = 0;
// uncompressed bytes accumulated across the whole archive
int totalSizeArchive = 0;
// In most cases, there will be only one data source
Map<String, DataSource> dataSourceCache = new HashMap<>(1);
Map<String, Long> taskNameToCode = new HashMap<>(16);
Map<String, List<String>> taskNameToUpstream = new HashMap<>(16);
try (ZipInputStream zIn = new ZipInputStream(file.getInputStream());
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(zIn))) {
// build process definition
processDefinition = new ProcessDefinition(projectCode,
processDefinitionName,
CodeGenerateUtils.getInstance().genCode(),
"",
"[]", null,
0, loginUser.getId(), loginUser.getTenantId());
ZipEntry entry;
while ((entry = zIn.getNextEntry()) != null) {
totalEntryArchive++;
// uncompressed bytes of the current entry, used for the compression ratio check
int totalSizeEntry = 0;
if (!entry.isDirectory()) {
StringBuilder sql = new StringBuilder();
String taskName = null;
String datasourceName = null;
List<String> upstreams = Collections.emptyList();
String line;
while ((line = bufferedReader.readLine()) != null) {
int nBytes = line.getBytes(StandardCharsets.UTF_8).length;
totalSizeEntry += nBytes;
totalSizeArchive += nBytes;
long compressedSize = entry.getCompressedSize();
// the compressed size may be unknown (-1) while streaming, so only check the ratio when it is available
if (compressedSize > 0 && totalSizeEntry / compressedSize > THRESHOLD_RATIO) {
throw new IllegalStateException("ratio between compressed and uncompressed data is highly suspicious, looks like a Zip Bomb Attack");
}
int commentIndex = line.indexOf("-- ");
if (commentIndex >= 0) {
int colonIndex = line.indexOf(":", commentIndex);
if (colonIndex > 0) {
String key = line.substring(commentIndex + 3, colonIndex).trim().toLowerCase();
String value = line.substring(colonIndex + 1).trim();
switch (key) {
case "name":
taskName = value;
line = line.substring(0, commentIndex);
break;
case "upstream":
upstreams = Arrays.stream(value.split(",")).map(String::trim)
.filter(s -> !"".equals(s)).collect(Collectors.toList());
line = line.substring(0, commentIndex);
break;
case "datasource":
datasourceName = value;
line = line.substring(0, commentIndex);
break;
default:
break;
}
}
}
if (!"".equals(line)) {
sql.append(line).append("\n");
}
}
// import/sql1.sql -> sql1
if (taskName == null) {
taskName = entry.getName();
index = taskName.indexOf("/");
if (index > 0) {
taskName = taskName.substring(index + 1);
}
index = taskName.lastIndexOf(".");
if (index > 0) {
taskName = taskName.substring(0, index);
}
}
DataSource dataSource = dataSourceCache.get(datasourceName);
if (dataSource == null) {
dataSource = queryDatasourceByNameAndUser(datasourceName, loginUser);
}
if (dataSource == null) {
putMsg(result, Status.DATASOURCE_NAME_ILLEGAL);
return result;
}
dataSourceCache.put(datasourceName, dataSource);
TaskDefinitionLog taskDefinition = buildNormalSqlTaskDefinition(taskName, dataSource, sql.substring(0, sql.length() - 1));
taskDefinitionList.add(taskDefinition);
taskNameToCode.put(taskDefinition.getName(), taskDefinition.getCode());
taskNameToUpstream.put(taskDefinition.getName(), upstreams);
}
if (totalSizeArchive > THRESHOLD_SIZE) {
throw new IllegalStateException("the uncompressed data size is too much for the application resource capacity");
}
if (totalEntryArchive > THRESHOLD_ENTRIES) {
throw new IllegalStateException("too much entries in this archive, can lead to inodes exhaustion of the system");
}
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR);
return result;
}
// build task relation
for (Map.Entry<String, Long> entry : taskNameToCode.entrySet()) {
List<String> upstreams = taskNameToUpstream.get(entry.getKey());
if (CollectionUtils.isEmpty(upstreams)
|| (upstreams.size() == 1 && upstreams.contains("root") && !taskNameToCode.containsKey("root"))) {
ProcessTaskRelationLog processTaskRelation = buildNormalTaskRelation(0, entry.getValue());
processTaskRelationList.add(processTaskRelation);
continue;
}
for (String upstream : upstreams) {
ProcessTaskRelationLog processTaskRelation = buildNormalTaskRelation(taskNameToCode.get(upstream), entry.getValue());
processTaskRelationList.add(processTaskRelation);
}
}
return createDagDefine(loginUser, processTaskRelationList, processDefinition, taskDefinitionList);
}
private ProcessTaskRelationLog buildNormalTaskRelation(long preTaskCode, long postTaskCode) {
ProcessTaskRelationLog processTaskRelation = new ProcessTaskRelationLog();
processTaskRelation.setPreTaskCode(preTaskCode);
processTaskRelation.setPreTaskVersion(0);
processTaskRelation.setPostTaskCode(postTaskCode);
processTaskRelation.setPostTaskVersion(0);
processTaskRelation.setConditionType(ConditionType.NONE);
processTaskRelation.setName("");
return processTaskRelation;
}
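/**
 * admin users may use any datasource with a matching name, regular users only the ones they own
 */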
private DataSource queryDatasourceByNameAndUser(String datasourceName, User loginUser) {
if (isAdmin(loginUser)) {
List<DataSource> dataSources = dataSourceMapper.queryDataSourceByName(datasourceName);
if (CollectionUtils.isNotEmpty(dataSources)) {
return dataSources.get(0);
}
} else {
return dataSourceMapper.queryDataSourceByNameAndUserId(loginUser.getId(), datasourceName);
}
return null;
}
private TaskDefinitionLog buildNormalSqlTaskDefinition(String taskName, DataSource dataSource, String sql) throws CodeGenerateException {
TaskDefinitionLog taskDefinition = new TaskDefinitionLog();
taskDefinition.setName(taskName);
taskDefinition.setFlag(Flag.YES);
SqlParameters sqlParameters = new SqlParameters();
sqlParameters.setType(dataSource.getType().name());
sqlParameters.setDatasource(dataSource.getId());
// the trailing newline has already been stripped by the caller
sqlParameters.setSql(sql);
// it may be a query type, but it can only be determined by parsing SQL
sqlParameters.setSqlType(SqlType.NON_QUERY.ordinal());
sqlParameters.setLocalParams(Collections.emptyList());
taskDefinition.setTaskParams(JSONUtils.toJsonString(sqlParameters));
taskDefinition.setCode(CodeGenerateUtils.getInstance().genCode());
taskDefinition.setTaskType(TaskType.SQL.getDesc());
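// conservative defaults for imported sql tasks: no retries, no delay, no timeout, default worker group and medium priority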
taskDefinition.setFailRetryTimes(0);
taskDefinition.setFailRetryInterval(0);
taskDefinition.setTimeoutFlag(TimeoutFlag.CLOSE);
taskDefinition.setWorkerGroup(DEFAULT_WORKER_GROUP);
taskDefinition.setTaskPriority(Priority.MEDIUM);
taskDefinition.setEnvironmentCode(-1);
taskDefinition.setTimeout(0);
taskDefinition.setDelayTime(0);
taskDefinition.setTimeoutNotifyStrategy(TaskTimeoutStrategy.WARN);
taskDefinition.setVersion(0);
taskDefinition.setResourceIds("");
return taskDefinition;
}
/**
* check and import
*/
private boolean checkAndImport(User loginUser, long projectCode, Map<String, Object> result, DagDataSchedule dagDataSchedule) {
if (!checkImportanceParams(dagDataSchedule, result)) {
return false;
}
ProcessDefinition processDefinition = dagDataSchedule.getProcessDefinition();
//unique check
Map<String, Object> checkResult = verifyProcessDefinitionName(loginUser, projectCode, processDefinition.getName());
if (Status.SUCCESS.equals(checkResult.get(Constants.STATUS))) {
putMsg(result, Status.SUCCESS);
} else {
result.putAll(checkResult);
return false;
}
String processDefinitionName = recursionProcessDefinitionName(projectCode, processDefinition.getName(), 1);
processDefinition.setName(processDefinitionName + "_import_" + DateUtils.getCurrentTimeStamp());
processDefinition.setId(0);
processDefinition.setProjectCode(projectCode);
processDefinition.setUserId(loginUser.getId());
try {
processDefinition.setCode(CodeGenerateUtils.getInstance().genCode());
} catch (CodeGenerateException e) {
putMsg(result, Status.CREATE_PROCESS_DEFINITION_ERROR);
return false;
}
List<TaskDefinition> taskDefinitionList = dagDataSchedule.getTaskDefinitionList();
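// map the task codes from the import file to newly generated codes, so relations and locations can be rewired below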
Map<Long, Long> taskCodeMap = new HashMap<>();
Date now = new Date();
List<TaskDefinitionLog> taskDefinitionLogList = new ArrayList<>();
for (TaskDefinition taskDefinition : taskDefinitionList) {
TaskDefinitionLog taskDefinitionLog = new TaskDefinitionLog(taskDefinition);
taskDefinitionLog.setName(taskDefinitionLog.getName() + "_import_" + DateUtils.getCurrentTimeStamp());
taskDefinitionLog.setProjectCode(projectCode);
taskDefinitionLog.setUserId(loginUser.getId());
taskDefinitionLog.setVersion(Constants.VERSION_FIRST);
taskDefinitionLog.setCreateTime(now);
taskDefinitionLog.setUpdateTime(now);
taskDefinitionLog.setOperator(loginUser.getId());
taskDefinitionLog.setOperateTime(now);
try {
long code = CodeGenerateUtils.getInstance().genCode();
taskCodeMap.put(taskDefinitionLog.getCode(), code);
taskDefinitionLog.setCode(code);
} catch (CodeGenerateException e) {
logger.error("Task code get error, ", e);
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS, "Error generating task definition code");
return false;
}
taskDefinitionLogList.add(taskDefinitionLog);
}
int insert = taskDefinitionMapper.batchInsert(taskDefinitionLogList);
int logInsert = taskDefinitionLogMapper.batchInsert(taskDefinitionLogList);
// both the task definition insert and its log insert must succeed
if (insert == 0 || logInsert == 0) {
putMsg(result, Status.CREATE_TASK_DEFINITION_ERROR);
throw new ServiceException(Status.CREATE_TASK_DEFINITION_ERROR);
}
List<ProcessTaskRelation> taskRelationList = dagDataSchedule.getProcessTaskRelationList();
List<ProcessTaskRelationLog> taskRelationLogList = new ArrayList<>();
for (ProcessTaskRelation processTaskRelation : taskRelationList) {
ProcessTaskRelationLog processTaskRelationLog = new ProcessTaskRelationLog(processTaskRelation);
if (taskCodeMap.containsKey(processTaskRelationLog.getPreTaskCode())) {
processTaskRelationLog.setPreTaskCode(taskCodeMap.get(processTaskRelationLog.getPreTaskCode()));
}
if (taskCodeMap.containsKey(processTaskRelationLog.getPostTaskCode())) {
processTaskRelationLog.setPostTaskCode(taskCodeMap.get(processTaskRelationLog.getPostTaskCode()));
}
processTaskRelationLog.setPreTaskVersion(Constants.VERSION_FIRST);
processTaskRelationLog.setPostTaskVersion(Constants.VERSION_FIRST);
taskRelationLogList.add(processTaskRelationLog);
}
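// rewrite the canvas locations so they reference the newly generated task codes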
if (StringUtils.isNotEmpty(processDefinition.getLocations()) && JSONUtils.checkJsonValid(processDefinition.getLocations())) {
ArrayNode arrayNode = JSONUtils.parseArray(processDefinition.getLocations());
ArrayNode newArrayNode = JSONUtils.createArrayNode();
for (int i = 0; i < arrayNode.size(); i++) {
ObjectNode newObjectNode = newArrayNode.addObject();
JsonNode jsonNode = arrayNode.get(i);
Long taskCode = taskCodeMap.get(jsonNode.get("taskCode").asLong());
if (Objects.nonNull(taskCode)) {
newObjectNode.put("taskCode", taskCode);
newObjectNode.set("x", jsonNode.get("x"));
newObjectNode.set("y", jsonNode.get("y"));
}
}
processDefinition.setLocations(newArrayNode.toString());
}
processDefinition.setCreateTime(new Date());
processDefinition.setUpdateTime(new Date());
Map<String, Object> createDagResult = createDagDefine(loginUser, taskRelationLogList, processDefinition, Lists.newArrayList());
if (Status.SUCCESS.equals(createDagResult.get(Constants.STATUS))) {
putMsg(createDagResult, Status.SUCCESS);
} else {
result.putAll(createDagResult);
throw new ServiceException(Status.IMPORT_PROCESS_DEFINE_ERROR);
}
Schedule schedule = dagDataSchedule.getSchedule();
if (null != schedule) {
schedule.setProcessDefinitionCode(processDefinition.getCode());
schedule.setUserId(loginUser.getId());
schedule.setCreateTime(now);
schedule.setUpdateTime(now);
int scheduleInsert = scheduleMapper.insert(schedule);
if (0 == scheduleInsert) {
putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR);
throw new ServiceException(Status.IMPORT_PROCESS_DEFINE_ERROR);
}
}
return true;
}
/**
* check importance params
*/
private boolean checkImportanceParams(DagDataSchedule dagDataSchedule, Map<String, Object> result) {
if (dagDataSchedule.getProcessDefinition() == null) {
putMsg(result, Status.DATA_IS_NULL, "ProcessDefinition");
return false;
}
if (CollectionUtils.isEmpty(dagDataSchedule.getTaskDefinitionList())) {
putMsg(result, Status.DATA_IS_NULL, "TaskDefinitionList");
return false;
}
if (CollectionUtils.isEmpty(dagDataSchedule.getProcessTaskRelationList())) {
putMsg(result, Status.DATA_IS_NULL, "ProcessTaskRelationList");
return false;
}
return true;
}
private String recursionProcessDefinitionName(long projectCode, String processDefinitionName, int num) {
ProcessDefinition processDefinition = processDefinitionMapper.queryByDefineName(projectCode, processDefinitionName);
if (processDefinition != null) {
if (num > 1) {
// strip the previously appended "(n)" suffix, whose length varies with the number of digits in n
String str = processDefinitionName.substring(0, processDefinitionName.lastIndexOf('('));
processDefinitionName = str + "(" + num + ")";
} else {
processDefinitionName = processDefinition.getName() + "(" + num + ")";
}
} else {
return processDefinitionName;
}
return recursionProcessDefinitionName(projectCode, processDefinitionName, num + 1);
}
/**
* check the process task relation json
*
* @param processTaskRelationJson process task relation json
* @return check result code
*/
@Override
public Map<String, Object> checkProcessNodeList(String processTaskRelationJson) {
Map<String, Object> result = new HashMap<>();
try {
if (processTaskRelationJson == null) {
logger.error("process data is null");
putMsg(result, Status.DATA_IS_NOT_VALID, processTaskRelationJson);
return result;
}
List<ProcessTaskRelation> taskRelationList = JSONUtils.toList(processTaskRelationJson, ProcessTaskRelation.class);
// Check whether the task node is normal
List<TaskNode> taskNodes = processService.transformTask(taskRelationList, Lists.newArrayList());
if (CollectionUtils.isEmpty(taskNodes)) {
logger.error("process node info is empty");
putMsg(result, Status.PROCESS_DAG_IS_EMPTY);
return result;
}
// check has cycle
if (graphHasCycle(taskNodes)) {
logger.error("process DAG has cycle");
putMsg(result, Status.PROCESS_NODE_HAS_CYCLE);
return result;
}
// check whether the process definition json is normal
for (TaskNode taskNode : taskNodes) {
if (!CheckUtils.checkTaskNodeParameters(taskNode)) {
logger.error("task node {} parameter invalid", taskNode.getName());
putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskNode.getName());
return result;
}
// check extra params
CheckUtils.checkOtherParams(taskNode.getExtras());
}
putMsg(result, Status.SUCCESS);
} catch (Exception e) {
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
result.put(Constants.MSG, e.getMessage());
}
return result;
}
/**
* get task node details based on process definition
*
* @param loginUser loginUser
* @param projectCode project code
* @param code process definition code
* @return task node list
*/
@Override
public Map<String, Object> getTaskNodeListByDefinitionCode(User loginUser, long projectCode, long code) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
logger.info("process define not exists");
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
return result;
}
DagData dagData = processService.genDagData(processDefinition);
result.put(Constants.DATA_LIST, dagData.getTaskDefinitionList());
putMsg(result, Status.SUCCESS);
return result;
}
/**
* get task node details map based on process definition
*
* @param loginUser loginUser
* @param projectCode project code
* @param codes define codes
* @return task node list
*/
@Override
public Map<String, Object> getNodeListMapByDefinitionCodes(User loginUser, long projectCode, String codes) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
Set<Long> defineCodeSet = Lists.newArrayList(codes.split(Constants.COMMA)).stream().map(Long::parseLong).collect(Collectors.toSet());
List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryByCodes(defineCodeSet);
if (CollectionUtils.isEmpty(processDefinitionList)) {
logger.info("process definition not exists");
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, codes);
return result;
}
HashMap<Long, Project> userProjects = new HashMap<>(Constants.DEFAULT_HASH_MAP_SIZE);
projectMapper.queryProjectCreatedAndAuthorizedByUserId(loginUser.getId())
.forEach(userProject -> userProjects.put(userProject.getCode(), userProject));
// check processDefinition exist in project
List<ProcessDefinition> processDefinitionListInProject = processDefinitionList.stream()
.filter(o -> userProjects.containsKey(o.getProjectCode())).collect(Collectors.toList());
if (CollectionUtils.isEmpty(processDefinitionListInProject)) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, codes);
return result;
}
Map<Long, List<TaskDefinition>> taskNodeMap = new HashMap<>();
for (ProcessDefinition processDefinition : processDefinitionListInProject) {
DagData dagData = processService.genDagData(processDefinition);
taskNodeMap.put(processDefinition.getCode(), dagData.getTaskDefinitionList());
}
result.put(Constants.DATA_LIST, taskNodeMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query process definition all by project code
*
* @param loginUser loginUser
* @param projectCode project code
* @return process definitions in the project
*/
@Override
public Map<String, Object> queryAllProcessDefinitionByProjectCode(User loginUser, long projectCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
List<ProcessDefinition> processDefinitions = processDefinitionMapper.queryAllDefinitionList(projectCode);
List<DagData> dagDataList = processDefinitions.stream().map(processService::genDagData).collect(Collectors.toList());
result.put(Constants.DATA_LIST, dagDataList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* Encapsulates the TreeView structure
*
* @param projectCode project code
* @param code process definition code
* @param limit limit
* @return tree view json data
*/
@Override
public Map<String, Object> viewTree(long projectCode, long code, Integer limit) {
Map<String, Object> result = new HashMap<>();
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (null == processDefinition || projectCode != processDefinition.getProjectCode()) {
logger.info("process define not exists");
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
return result;
}
DAG<String, TaskNode, TaskNodeRelation> dag = processService.genDagGraph(processDefinition);
// nodes that are running
Map<String, List<TreeViewDto>> runningNodeMap = new ConcurrentHashMap<>();
// nodes that are waiting to run
Map<String, List<TreeViewDto>> waitingRunningNodeMap = new ConcurrentHashMap<>();
// List of process instances
List<ProcessInstance> processInstanceList = processInstanceService.queryByProcessDefineCode(code, limit);
processInstanceList.forEach(processInstance -> processInstance.setDuration(DateUtils.format2Duration(processInstance.getStartTime(), processInstance.getEndTime())));
List<TaskDefinitionLog> taskDefinitionList = processService.genTaskDefineList(processTaskRelationMapper.queryByProcessCode(processDefinition.getProjectCode(), processDefinition.getCode()));
Map<Long, TaskDefinitionLog> taskDefinitionMap = taskDefinitionList.stream()
.collect(Collectors.toMap(TaskDefinitionLog::getCode, taskDefinitionLog -> taskDefinitionLog));
if (limit > processInstanceList.size()) {
limit = processInstanceList.size();
}
TreeViewDto parentTreeViewDto = new TreeViewDto();
parentTreeViewDto.setName("DAG");
parentTreeViewDto.setType("");
parentTreeViewDto.setCode(0L);
// the tree view is rooted at the process definition itself, so attach its instances first
for (int i = limit - 1; i >= 0; i--) {
ProcessInstance processInstance = processInstanceList.get(i);
Date endTime = processInstance.getEndTime() == null ? new Date() : processInstance.getEndTime();
parentTreeViewDto.getInstances().add(new Instance(processInstance.getId(), processInstance.getName(), processInstance.getProcessDefinitionCode(),
"", processInstance.getState().toString(), processInstance.getStartTime(), endTime, processInstance.getHost(),
DateUtils.format2Readable(endTime.getTime() - processInstance.getStartTime().getTime())));
}
List<TreeViewDto> parentTreeViewDtoList = new ArrayList<>();
parentTreeViewDtoList.add(parentTreeViewDto);
// Here is the encapsulation task instance
for (String startNode : dag.getBeginNode()) {
runningNodeMap.put(startNode, parentTreeViewDtoList);
}
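// level-order traversal of the DAG: runningNodeMap holds the current level, waitingRunningNodeMap collects the next level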
while (Stopper.isRunning()) {
Set<String> postNodeList;
Iterator<Map.Entry<String, List<TreeViewDto>>> iter = runningNodeMap.entrySet().iterator();
while (iter.hasNext()) {
Map.Entry<String, List<TreeViewDto>> en = iter.next();
String nodeCode = en.getKey();
parentTreeViewDtoList = en.getValue();
TreeViewDto treeViewDto = new TreeViewDto();
TaskNode taskNode = dag.getNode(nodeCode);
treeViewDto.setType(taskNode.getType());
treeViewDto.setCode(taskNode.getCode());
treeViewDto.setName(taskNode.getName());
//set treeViewDto instances
for (int i = limit - 1; i >= 0; i--) {
ProcessInstance processInstance = processInstanceList.get(i);
TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndCode(processInstance.getId(), Long.parseLong(nodeCode));
if (taskInstance == null) {
treeViewDto.getInstances().add(new Instance(-1, "not running", 0, "null"));
} else {
Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime();
Date endTime = taskInstance.getEndTime() == null ? new Date() : taskInstance.getEndTime();
long subProcessCode = 0L;
// if the task is a sub process, resolve the referenced sub process definition code, otherwise it stays 0
if (taskInstance.isSubProcess()) {
TaskDefinition taskDefinition = taskDefinitionMap.get(taskInstance.getTaskCode());
subProcessCode = Long.parseLong(JSONUtils.parseObject(
taskDefinition.getTaskParams()).path(CMD_PARAM_SUB_PROCESS_DEFINE_CODE).asText());
}
treeViewDto.getInstances().add(new Instance(taskInstance.getId(), taskInstance.getName(), taskInstance.getTaskCode(),
taskInstance.getTaskType(), taskInstance.getState().toString(), taskInstance.getStartTime(), taskInstance.getEndTime(),
taskInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - startTime.getTime()), subProcessCode));
}
}
for (TreeViewDto pTreeViewDto : parentTreeViewDtoList) {
pTreeViewDto.getChildren().add(treeViewDto);
}
postNodeList = dag.getSubsequentNodes(nodeCode);
if (CollectionUtils.isNotEmpty(postNodeList)) {
for (String nextNodeCode : postNodeList) {
List<TreeViewDto> treeViewDtoList = waitingRunningNodeMap.get(nextNodeCode);
if (CollectionUtils.isEmpty(treeViewDtoList)) {
treeViewDtoList = new ArrayList<>();
}
treeViewDtoList.add(treeViewDto);
waitingRunningNodeMap.put(nextNodeCode, treeViewDtoList);
}
}
runningNodeMap.remove(nodeCode);
}
if (waitingRunningNodeMap.isEmpty()) {
break;
} else {
runningNodeMap.putAll(waitingRunningNodeMap);
waitingRunningNodeMap.clear();
}
}
result.put(Constants.DATA_LIST, parentTreeViewDto);
result.put(Constants.STATUS, Status.SUCCESS);
result.put(Constants.MSG, Status.SUCCESS.getMsg());
return result;
}
/**
* whether the graph has a cycle
*
* @param taskNodeResponseList task node response list
* @return if graph has cycle flag
*/
private boolean graphHasCycle(List<TaskNode> taskNodeResponseList) {
DAG<String, TaskNode, String> graph = new DAG<>();
// Fill the vertices
for (TaskNode taskNodeResponse : taskNodeResponseList) {
graph.addNode(Long.toString(taskNodeResponse.getCode()), taskNodeResponse);
}
// Fill edge relations
for (TaskNode taskNodeResponse : taskNodeResponseList) {
List<String> preTasks = JSONUtils.toList(taskNodeResponse.getPreTasks(), String.class);
if (CollectionUtils.isNotEmpty(preTasks)) {
for (String preTask : preTasks) {
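// an edge that cannot be added means the graph is not a valid DAG, e.g. the edge would close a cycle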
if (!graph.addEdge(preTask, Long.toString(taskNodeResponse.getCode()))) {
return true;
}
}
}
}
return graph.hasCycle();
}
/**
* batch copy process definition
*
* @param loginUser loginUser
* @param projectCode projectCode
* @param codes processDefinitionCodes
* @param targetProjectCode targetProjectCode
*/
@Override
public Map<String, Object> batchCopyProcessDefinition(User loginUser,
long projectCode,
String codes,
long targetProjectCode) {
Map<String, Object> result = checkParams(loginUser, projectCode, codes, targetProjectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
List<String> failedProcessList = new ArrayList<>();
doBatchOperateProcessDefinition(loginUser, targetProjectCode, failedProcessList, codes, result, true);
if (result.get(Constants.STATUS) == Status.NOT_SUPPORT_COPY_TASK_TYPE) {
return result;
}
checkBatchOperateResult(projectCode, targetProjectCode, result, failedProcessList, true);
return result;
}
/**
* batch move process definition
* Will be deleted
* @param loginUser loginUser
* @param projectCode projectCode
* @param codes processDefinitionCodes
* @param targetProjectCode targetProjectCode
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> batchMoveProcessDefinition(User loginUser,
long projectCode,
String codes,
long targetProjectCode) {
Map<String, Object> result = checkParams(loginUser, projectCode, codes, targetProjectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (projectCode == targetProjectCode) {
return result;
}
List<String> failedProcessList = new ArrayList<>();
doBatchOperateProcessDefinition(loginUser, targetProjectCode, failedProcessList, codes, result, false);
checkBatchOperateResult(projectCode, targetProjectCode, result, failedProcessList, false);
return result;
}
private Map<String, Object> checkParams(User loginUser,
long projectCode,
String processDefinitionCodes,
long targetProjectCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (org.apache.commons.lang.StringUtils.isEmpty(processDefinitionCodes)) {
putMsg(result, Status.PROCESS_DEFINITION_CODES_IS_EMPTY, processDefinitionCodes);
return result;
}
if (projectCode != targetProjectCode) {
Project targetProject = projectMapper.queryByCode(targetProjectCode);
//check user access for project
Map<String, Object> targetResult = projectService.checkProjectAndAuth(loginUser, targetProject, targetProjectCode);
if (targetResult.get(Constants.STATUS) != Status.SUCCESS) {
return targetResult;
}
}
return result;
}
private void doBatchOperateProcessDefinition(User loginUser,
long targetProjectCode,
List<String> failedProcessList,
String processDefinitionCodes,
Map<String, Object> result,
boolean isCopy) {
Set<Long> definitionCodes = Arrays.stream(processDefinitionCodes.split(Constants.COMMA)).map(Long::parseLong).collect(Collectors.toSet());
List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryByCodes(definitionCodes);
Set<Long> queryCodes = processDefinitionList.stream().map(ProcessDefinition::getCode).collect(Collectors.toSet());
// definitionCodes - queryCodes
Set<Long> diffCode = definitionCodes.stream().filter(code -> !queryCodes.contains(code)).collect(Collectors.toSet());
diffCode.forEach(code -> failedProcessList.add(code + "[null]"));
for (ProcessDefinition processDefinition : processDefinitionList) {
List<ProcessTaskRelation> processTaskRelations =
processTaskRelationMapper.queryByProcessCode(processDefinition.getProjectCode(), processDefinition.getCode());
List<ProcessTaskRelationLog> taskRelationList = processTaskRelations.stream().map(ProcessTaskRelationLog::new).collect(Collectors.toList());
processDefinition.setProjectCode(targetProjectCode);
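// copy regenerates codes for the definition and all of its tasks, move keeps the codes and only changes the project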
if (isCopy) {
List<TaskDefinitionLog> taskDefinitionLogs = processService.genTaskDefineList(processTaskRelations);
Map<Long, Long> taskCodeMap = new HashMap<>();
for (TaskDefinitionLog taskDefinitionLog : taskDefinitionLogs) {
if (TaskType.CONDITIONS.getDesc().equals(taskDefinitionLog.getTaskType())
|| TaskType.SWITCH.getDesc().equals(taskDefinitionLog.getTaskType())
|| TaskType.SUB_PROCESS.getDesc().equals(taskDefinitionLog.getTaskType())
|| TaskType.DEPENDENT.getDesc().equals(taskDefinitionLog.getTaskType())) {
putMsg(result, Status.NOT_SUPPORT_COPY_TASK_TYPE, taskDefinitionLog.getTaskType());
return;
}
try {
long taskCode = CodeGenerateUtils.getInstance().genCode();
taskCodeMap.put(taskDefinitionLog.getCode(), taskCode);
taskDefinitionLog.setCode(taskCode);
} catch (CodeGenerateException e) {
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS);
throw new ServiceException(Status.INTERNAL_SERVER_ERROR_ARGS);
}
taskDefinitionLog.setProjectCode(targetProjectCode);
taskDefinitionLog.setVersion(0);
taskDefinitionLog.setName(taskDefinitionLog.getName() + "_copy_" + DateUtils.getCurrentTimeStamp());
}
for (ProcessTaskRelationLog processTaskRelationLog : taskRelationList) {
if (processTaskRelationLog.getPreTaskCode() > 0) {
processTaskRelationLog.setPreTaskCode(taskCodeMap.get(processTaskRelationLog.getPreTaskCode()));
}
if (processTaskRelationLog.getPostTaskCode() > 0) {
processTaskRelationLog.setPostTaskCode(taskCodeMap.get(processTaskRelationLog.getPostTaskCode()));
}
}
try {
processDefinition.setCode(CodeGenerateUtils.getInstance().genCode());
} catch (CodeGenerateException e) {
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS);
throw new ServiceException(Status.INTERNAL_SERVER_ERROR_ARGS);
}
processDefinition.setId(0);
processDefinition.setUserId(loginUser.getId());
processDefinition.setName(processDefinition.getName() + "_copy_" + DateUtils.getCurrentTimeStamp());
if (StringUtils.isNotBlank(processDefinition.getLocations())) {
ArrayNode jsonNodes = JSONUtils.parseArray(processDefinition.getLocations());
for (int i = 0; i < jsonNodes.size(); i++) {
ObjectNode node = (ObjectNode) jsonNodes.path(i);
node.put("taskCode", taskCodeMap.get(node.get("taskCode").asLong()));
jsonNodes.set(i, node);
}
processDefinition.setLocations(JSONUtils.toJsonString(jsonNodes));
}
try {
result.putAll(createDagDefine(loginUser, taskRelationList, processDefinition, taskDefinitionLogs));
} catch (Exception e) {
putMsg(result, Status.COPY_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.COPY_PROCESS_DEFINITION_ERROR);
}
} else {
try {
result.putAll(updateDagDefine(loginUser, taskRelationList, processDefinition, null, Lists.newArrayList()));
} catch (Exception e) {
putMsg(result, Status.MOVE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.MOVE_PROCESS_DEFINITION_ERROR);
}
}
if (result.get(Constants.STATUS) != Status.SUCCESS) {
failedProcessList.add(processDefinition.getCode() + "[" + processDefinition.getName() + "]");
}
}
}
/**
* switch the defined process definition version
*
* @param loginUser login user
* @param projectCode project code
* @param code process definition code
* @param version the version user want to switch
* @return switch process definition version result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> switchProcessDefinitionVersion(User loginUser, long projectCode, long code, int version) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (Objects.isNull(processDefinition) || projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_ERROR, code);
return result;
}
ProcessDefinitionLog processDefinitionLog = processDefinitionLogMapper.queryByDefinitionCodeAndVersion(code, version);
if (Objects.isNull(processDefinitionLog)) {
putMsg(result, Status.SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_VERSION_ERROR, processDefinition.getCode(), version);
return result;
}
int switchVersion = processService.switchVersion(processDefinition, processDefinitionLog);
if (switchVersion <= 0) {
putMsg(result, Status.SWITCH_PROCESS_DEFINITION_VERSION_ERROR);
throw new ServiceException(Status.SWITCH_PROCESS_DEFINITION_VERSION_ERROR);
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* check batch operate result
*
* @param srcProjectCode srcProjectCode
* @param targetProjectCode targetProjectCode
* @param result result
* @param failedProcessList failedProcessList
* @param isCopy isCopy
*/
private void checkBatchOperateResult(long srcProjectCode, long targetProjectCode,
Map<String, Object> result, List<String> failedProcessList, boolean isCopy) {
if (!failedProcessList.isEmpty()) {
if (isCopy) {
putMsg(result, Status.COPY_PROCESS_DEFINITION_ERROR, srcProjectCode, targetProjectCode, String.join(",", failedProcessList));
} else {
putMsg(result, Status.MOVE_PROCESS_DEFINITION_ERROR, srcProjectCode, targetProjectCode, String.join(",", failedProcessList));
}
} else {
putMsg(result, Status.SUCCESS);
}
}
/**
* query the pagination versions info by one certain process definition code
*
* @param loginUser login user info to check auth
* @param projectCode project code
* @param pageNo page number
* @param pageSize page size
* @param code process definition code
* @return the pagination process definition versions info of the certain process definition
*/
@Override
public Result queryProcessDefinitionVersions(User loginUser, long projectCode, int pageNo, int pageSize, long code) {
Result result = new Result();
Project project = projectMapper.queryByCode(projectCode);
// check user access for project
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectCode);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
putMsg(result, resultStatus);
return result;
}
PageInfo<ProcessDefinitionLog> pageInfo = new PageInfo<>(pageNo, pageSize);
Page<ProcessDefinitionLog> page = new Page<>(pageNo, pageSize);
IPage<ProcessDefinitionLog> processDefinitionVersionsPaging = processDefinitionLogMapper.queryProcessDefinitionVersionsPaging(page, code, projectCode);
List<ProcessDefinitionLog> processDefinitionLogs = processDefinitionVersionsPaging.getRecords();
pageInfo.setTotalList(processDefinitionLogs);
pageInfo.setTotal((int) processDefinitionVersionsPaging.getTotal());
result.setData(pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete one certain process definition by version number and process definition code
*
* @param loginUser login user info to check auth
* @param projectCode project code
* @param code process definition code
* @param version version number
* @return delete result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> deleteProcessDefinitionVersion(User loginUser, long projectCode, long code, int version) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
} else {
if (processDefinition.getVersion() == version) {
putMsg(result, Status.MAIN_TABLE_USING_VERSION);
return result;
}
int deleteLog = processDefinitionLogMapper.deleteByProcessDefinitionCodeAndVersion(code, version);
int deleteRelationLog = processTaskRelationLogMapper.deleteByCode(code, version);
if (deleteLog == 0 || deleteRelationLog == 0) {
putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR);
throw new ServiceException(Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR);
}
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* create empty process definition
*
* @param loginUser login user
* @param projectCode project code
* @param name process definition name
* @param description description
* @param globalParams globalParams
* @param timeout timeout
* @param tenantCode tenantCode
* @param scheduleJson scheduleJson
* @return process definition code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> createEmptyProcessDefinition(User loginUser,
long projectCode,
String name,
String description,
String globalParams,
int timeout,
String tenantCode,
String scheduleJson,
ProcessExecutionTypeEnum executionType) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
// check whether the new process define name exist
ProcessDefinition definition = processDefinitionMapper.verifyByDefineName(project.getCode(), name);
if (definition != null) {
putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name);
return result;
}
int tenantId = -1;
if (!Constants.DEFAULT.equals(tenantCode)) {
Tenant tenant = tenantMapper.queryByTenantCode(tenantCode);
if (tenant == null) {
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
tenantId = tenant.getId();
}
long processDefinitionCode;
try {
processDefinitionCode = CodeGenerateUtils.getInstance().genCode();
} catch (CodeGenerateException e) {
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS);
return result;
}
ProcessDefinition processDefinition = new ProcessDefinition(projectCode, name, processDefinitionCode, description,
globalParams, "", timeout, loginUser.getId(), tenantId);
processDefinition.setExecutionType(executionType);
result = createEmptyDagDefine(loginUser, processDefinition);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (StringUtils.isBlank(scheduleJson)) {
return result;
}
// save dag schedule
Map<String, Object> scheduleResult = createDagSchedule(loginUser, processDefinition, scheduleJson);
if (scheduleResult.get(Constants.STATUS) != Status.SUCCESS) {
Status scheduleResultStatus = (Status) scheduleResult.get(Constants.STATUS);
putMsg(result, scheduleResultStatus);
throw new ServiceException(scheduleResultStatus);
}
return result;
}
private Map<String, Object> createEmptyDagDefine(User loginUser, ProcessDefinition processDefinition) {
Map<String, Object> result = new HashMap<>();
int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE);
if (insertVersion == 0) {
putMsg(result, Status.CREATE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.CREATE_PROCESS_DEFINITION_ERROR);
}
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefinition);
return result;
}
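/**
 * parse and persist the schedule carried with an empty process definition
 * <p>
 * scheduleJson is deserialized into {@link Schedule}; a minimal sketch (all field values
 * are illustrative only) looks like:
 * <pre>
 * {"startTime":"2021-01-01 00:00:00","endTime":"2022-01-01 00:00:00","crontab":"0 0 0 * * ? *","timezoneId":"UTC"}
 * </pre>
 */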
private Map<String, Object> createDagSchedule(User loginUser, ProcessDefinition processDefinition, String scheduleJson) {
Map<String, Object> result = new HashMap<>();
Schedule scheduleObj = JSONUtils.parseObject(scheduleJson, Schedule.class);
if (scheduleObj == null) {
putMsg(result, Status.DATA_IS_NOT_VALID, scheduleJson);
throw new ServiceException(Status.DATA_IS_NOT_VALID);
}
Date now = new Date();
scheduleObj.setProcessDefinitionCode(processDefinition.getCode());
if (DateUtils.differSec(scheduleObj.getStartTime(), scheduleObj.getEndTime()) == 0) {
logger.warn("The start time must not be the same as the end");
putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME);
return result;
}
if (!org.quartz.CronExpression.isValidExpression(scheduleObj.getCrontab())) {
logger.error("{} verify failure", scheduleObj.getCrontab());
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleObj.getCrontab());
return result;
}
scheduleObj.setWarningType(scheduleObj.getWarningType() == null ? WarningType.NONE : scheduleObj.getWarningType());
scheduleObj.setWarningGroupId(scheduleObj.getWarningGroupId() == 0 ? 1 : scheduleObj.getWarningGroupId());
scheduleObj.setFailureStrategy(scheduleObj.getFailureStrategy() == null ? FailureStrategy.CONTINUE : scheduleObj.getFailureStrategy());
scheduleObj.setCreateTime(now);
scheduleObj.setUpdateTime(now);
scheduleObj.setUserId(loginUser.getId());
scheduleObj.setReleaseState(ReleaseState.OFFLINE);
scheduleObj.setProcessInstancePriority(scheduleObj.getProcessInstancePriority() == null ? Priority.MEDIUM : scheduleObj.getProcessInstancePriority());
scheduleObj.setWorkerGroup(scheduleObj.getWorkerGroup() == null ? "default" : scheduleObj.getWorkerGroup());
scheduleObj.setEnvironmentCode(scheduleObj.getEnvironmentCode() == null ? -1 : scheduleObj.getEnvironmentCode());
scheduleMapper.insert(scheduleObj);
putMsg(result, Status.SUCCESS);
result.put("scheduleId", scheduleObj.getId());
return result;
}
/**
* update process definition basic info
*
* @param loginUser login user
* @param projectCode project code
* @param name process definition name
* @param code process definition code
* @param description description
* @param globalParams globalParams
* @param timeout timeout
* @param tenantCode tenantCode
* @param scheduleJson scheduleJson
* @param executionType executionType
* @return update result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> updateProcessDefinitionBasicInfo(User loginUser,
long projectCode,
String name,
long code,
String description,
String globalParams,
int timeout,
String tenantCode,
String scheduleJson,
ProcessExecutionTypeEnum executionType) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
int tenantId = -1;
if (!Constants.DEFAULT.equals(tenantCode)) {
Tenant tenant = tenantMapper.queryByTenantCode(tenantCode);
if (tenant == null) {
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
tenantId = tenant.getId();
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
// check process definition exists
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
return result;
}
if (processDefinition.getReleaseState() == ReleaseState.ONLINE) {
// an online process definition is not allowed to be edited
putMsg(result, Status.PROCESS_DEFINE_NOT_ALLOWED_EDIT, processDefinition.getName());
return result;
}
if (!name.equals(processDefinition.getName())) {
// check whether the new process define name exist
ProcessDefinition definition = processDefinitionMapper.verifyByDefineName(project.getCode(), name);
if (definition != null) {
putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name);
return result;
}
}
ProcessDefinition processDefinitionDeepCopy = JSONUtils.parseObject(JSONUtils.toJsonString(processDefinition), ProcessDefinition.class);
processDefinition.set(projectCode, name, description, globalParams, "", timeout, tenantId);
processDefinition.setExecutionType(executionType);
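// a basic-info update keeps the current DAG, so reuse the task relations of the latest definition version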
List<ProcessTaskRelationLog> taskRelationList = processTaskRelationLogMapper.queryByProcessCodeAndVersion(processDefinition.getCode(), processDefinition.getVersion());
result = updateDagDefine(loginUser, taskRelationList, processDefinition, processDefinitionDeepCopy, Lists.newArrayList());
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (StringUtils.isBlank(scheduleJson)) {
return result;
}
// update dag schedule
Map<String, Object> scheduleResult = updateDagSchedule(loginUser, projectCode, code, scheduleJson);
if (scheduleResult.get(Constants.STATUS) != Status.SUCCESS) {
Status scheduleResultStatus = (Status) scheduleResult.get(Constants.STATUS);
putMsg(result, scheduleResultStatus);
throw new ServiceException(scheduleResultStatus);
}
return result;
}
private Map<String, Object> updateDagSchedule(User loginUser,
long projectCode,
long processDefinitionCode,
String scheduleJson) {
Map<String, Object> result = new HashMap<>();
Schedule schedule = JSONUtils.parseObject(scheduleJson, Schedule.class);
if (schedule == null) {
putMsg(result, Status.DATA_IS_NOT_VALID, scheduleJson);
throw new ServiceException(Status.DATA_IS_NOT_VALID);
}
// set default value
FailureStrategy failureStrategy = schedule.getFailureStrategy() == null ? FailureStrategy.CONTINUE : schedule.getFailureStrategy();
WarningType warningType = schedule.getWarningType() == null ? WarningType.NONE : schedule.getWarningType();
Priority processInstancePriority = schedule.getProcessInstancePriority() == null ? Priority.MEDIUM : schedule.getProcessInstancePriority();
int warningGroupId = schedule.getWarningGroupId() == 0 ? 1 : schedule.getWarningGroupId();
String workerGroup = schedule.getWorkerGroup() == null ? "default" : schedule.getWorkerGroup();
long environmentCode = schedule.getEnvironmentCode() == null ? -1 : schedule.getEnvironmentCode();
ScheduleParam param = new ScheduleParam();
param.setStartTime(schedule.getStartTime());
param.setEndTime(schedule.getEndTime());
param.setCrontab(schedule.getCrontab());
param.setTimezoneId(schedule.getTimezoneId());
return schedulerService.updateScheduleByProcessDefinitionCode(
loginUser,
projectCode,
processDefinitionCode,
JSONUtils.toJsonString(param),
warningType,
warningGroupId,
failureStrategy,
processInstancePriority,
workerGroup,
environmentCode);
}
/**
* release process definition and schedule
*
* @param loginUser login user
* @param projectCode project code
* @param code process definition code
* @param releaseState releaseState
* @return update result code
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> releaseWorkflowAndSchedule(User loginUser, long projectCode, long code, ReleaseState releaseState) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
// check state
if (null == releaseState) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE);
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code);
return result;
}
Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(code);
if (scheduleObj == null) {
putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, "processDefinitionCode:" + code);
return result;
}
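// ONLINE requires a non-empty DAG; OFFLINE also takes the bound schedule offline and deletes it from the scheduler service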
switch (releaseState) {
case ONLINE:
List<ProcessTaskRelation> relationList = processService.findRelationByCode(code, processDefinition.getVersion());
if (CollectionUtils.isEmpty(relationList)) {
putMsg(result, Status.PROCESS_DAG_IS_EMPTY);
return result;
}
processDefinition.setReleaseState(releaseState);
processDefinitionMapper.updateById(processDefinition);
scheduleObj.setReleaseState(ReleaseState.ONLINE);
scheduleMapper.updateById(scheduleObj);
break;
case OFFLINE:
processDefinition.setReleaseState(releaseState);
int updateProcess = processDefinitionMapper.updateById(processDefinition);
if (updateProcess > 0) {
logger.info("set schedule offline, project code: {}, schedule id: {}, process definition code: {}", projectCode, scheduleObj.getId(), code);
// set status
scheduleObj.setReleaseState(ReleaseState.OFFLINE);
int updateSchedule = scheduleMapper.updateById(scheduleObj);
if (updateSchedule == 0) {
putMsg(result, Status.OFFLINE_SCHEDULE_ERROR);
throw new ServiceException(Status.OFFLINE_SCHEDULE_ERROR);
}
schedulerService.deleteSchedule(project.getId(), scheduleObj.getId());
}
break;
default:
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE);
return result;
}
putMsg(result, Status.SUCCESS);
return result;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,119 | [Bug] [API] Update ProcessInstance error and without reason output | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![](https://ftp.bmp.ovh/imgs/2022/01/eca77fe88da36409.jpg)
Updating the process instance fails, and no reason for the error is shown.
### What you expected to happen
Updating the process instance should succeed, or at least report the reason when it fails.
### How to reproduce
Create a new task on the process instance edit page and link it to another task.
### Version
2.0.2
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8119 | https://github.com/apache/dolphinscheduler/pull/8122 | 38b40e7ac49390164207104a4a788639b12696c4 | b5631d5281e84b72afa44a3d37631ffdbfd178ca | "2022-01-19T07:32:41Z" | java | "2022-01-21T01:32:38Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessInstanceServiceImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.common.Constants.DATA_LIST;
import static org.apache.dolphinscheduler.common.Constants.DEPENDENT_SPLIT;
import static org.apache.dolphinscheduler.common.Constants.GLOBAL_PARAMS;
import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS;
import static org.apache.dolphinscheduler.common.Constants.PROCESS_INSTANCE_STATE;
import static org.apache.dolphinscheduler.common.Constants.TASK_LIST;
import org.apache.dolphinscheduler.api.dto.gantt.GanttDto;
import org.apache.dolphinscheduler.api.dto.gantt.Task;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.service.ExecutorService;
import org.apache.dolphinscheduler.api.service.LoggerService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.service.ProcessInstanceService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.service.UsersService;
import org.apache.dolphinscheduler.api.utils.CheckUtils;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DependResult;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelationLog;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.TaskDefinition;
import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionLogMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionLogMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* process instance service impl
*/
@Service
public class ProcessInstanceServiceImpl extends BaseServiceImpl implements ProcessInstanceService {
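// keys of the per-task entries assembled by getLocalParams(...)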
public static final String TASK_TYPE = "taskType";
public static final String LOCAL_PARAMS_LIST = "localParamsList";
@Autowired
ProjectMapper projectMapper;
@Autowired
ProjectService projectService;
@Autowired
ProcessService processService;
@Autowired
ProcessInstanceMapper processInstanceMapper;
@Autowired
ProcessDefinitionMapper processDefineMapper;
@Autowired
ProcessDefinitionService processDefinitionService;
@Autowired
ExecutorService execService;
@Autowired
TaskInstanceMapper taskInstanceMapper;
@Autowired
LoggerService loggerService;
@Autowired
ProcessDefinitionLogMapper processDefinitionLogMapper;
@Autowired
TaskDefinitionLogMapper taskDefinitionLogMapper;
@Autowired
UsersService usersService;
@Autowired
private TenantMapper tenantMapper;
@Autowired
TaskDefinitionMapper taskDefinitionMapper;
/**
* return top n SUCCESS process instance order by running time which started between startTime and endTime
*/
@Override
public Map<String, Object> queryTopNLongestRunningProcessInstance(User loginUser, long projectCode, int size, String startTime, String endTime) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (0 > size) {
putMsg(result, Status.NEGTIVE_SIZE_NUMBER_ERROR, size);
return result;
}
if (Objects.isNull(startTime)) {
putMsg(result, Status.DATA_IS_NULL, Constants.START_TIME);
return result;
}
Date start = DateUtils.stringToDate(startTime);
if (Objects.isNull(endTime)) {
putMsg(result, Status.DATA_IS_NULL, Constants.END_TIME);
return result;
}
Date end = DateUtils.stringToDate(endTime);
if (start == null || end == null) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.START_END_DATE);
return result;
}
if (start.getTime() > end.getTime()) {
putMsg(result, Status.START_TIME_BIGGER_THAN_END_TIME_ERROR, startTime, endTime);
return result;
}
List<ProcessInstance> processInstances = processInstanceMapper.queryTopNProcessInstance(size, start, end, ExecutionStatus.SUCCESS, projectCode);
result.put(DATA_LIST, processInstances);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query process instance by id
*
* @param loginUser login user
* @param projectCode project code
* @param processId process instance id
* @return process instance detail
*/
@Override
public Map<String, Object> queryProcessInstanceById(User loginUser, long projectCode, Integer processId) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId);
ProcessDefinition processDefinition = processService.findProcessDefinition(processInstance.getProcessDefinitionCode(),
processInstance.getProcessDefinitionVersion());
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processId);
} else {
processInstance.setWarningGroupId(processDefinition.getWarningGroupId());
processInstance.setLocations(processDefinition.getLocations());
processInstance.setDagData(processService.genDagData(processDefinition));
result.put(DATA_LIST, processInstance);
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* paging query process instance list, filtering according to project, process definition, time range, keyword, process status
*
* @param loginUser login user
* @param projectCode project code
* @param processDefineCode process definition code
* @param pageNo page number
* @param pageSize page size
* @param searchVal search value
* @param stateType state type
* @param host host
* @param startDate start time
* @param endDate end time
* @return process instance list
*/
@Override
public Result queryProcessInstanceList(User loginUser, long projectCode, long processDefineCode, String startDate, String endDate, String searchVal, String executorName,
ExecutionStatus stateType, String host, Integer pageNo, Integer pageSize) {
Result result = new Result();
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectCode);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
putMsg(result, resultEnum);
return result;
}
int[] statusArray = null;
// filter by state
if (stateType != null) {
statusArray = new int[]{stateType.ordinal()};
}
Map<String, Object> checkAndParseDateResult = checkAndParseDateParameters(startDate, endDate);
resultEnum = (Status) checkAndParseDateResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
putMsg(result, resultEnum);
return result;
}
Date start = (Date) checkAndParseDateResult.get(Constants.START_TIME);
Date end = (Date) checkAndParseDateResult.get(Constants.END_TIME);
Page<ProcessInstance> page = new Page<>(pageNo, pageSize);
PageInfo<ProcessInstance> pageInfo = new PageInfo<>(pageNo, pageSize);
int executorId = usersService.getUserIdByName(executorName);
IPage<ProcessInstance> processInstanceList = processInstanceMapper.queryProcessInstanceListPaging(page,
project.getCode(), processDefineCode, searchVal, executorId, statusArray, host, start, end);
List<ProcessInstance> processInstances = processInstanceList.getRecords();
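// batch-load the executors once and index them by id so the executor names can be filled in below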
List<Integer> userIds = Collections.emptyList();
if (CollectionUtils.isNotEmpty(processInstances)) {
userIds = processInstances.stream().map(ProcessInstance::getExecutorId).collect(Collectors.toList());
}
List<User> users = usersService.queryUser(userIds);
Map<Integer, User> idToUserMap = Collections.emptyMap();
if (CollectionUtils.isNotEmpty(users)) {
idToUserMap = users.stream().collect(Collectors.toMap(User::getId, Function.identity()));
}
for (ProcessInstance processInstance : processInstances) {
processInstance.setDuration(DateUtils.format2Duration(processInstance.getStartTime(), processInstance.getEndTime()));
User executor = idToUserMap.get(processInstance.getExecutorId());
if (null != executor) {
processInstance.setExecutorName(executor.getUserName());
}
}
pageInfo.setTotal((int) processInstanceList.getTotal());
pageInfo.setTotalList(processInstances);
result.setData(pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query task list by process instance id
*
* @param loginUser login user
* @param projectCode project code
* @param processId process instance id
* @return task list for the process instance
* @throws IOException io exception
*/
@Override
public Map<String, Object> queryTaskListByProcessId(User loginUser, long projectCode, Integer processId) throws IOException {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId);
ProcessDefinition processDefinition = processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode());
if (processDefinition != null && projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processId);
return result;
}
List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(processId);
addDependResultForTaskList(taskInstanceList);
Map<String, Object> resultMap = new HashMap<>();
resultMap.put(PROCESS_INSTANCE_STATE, processInstance.getState().toString());
resultMap.put(TASK_LIST, taskInstanceList);
result.put(DATA_LIST, resultMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* add dependent result for dependent task
*/
private void addDependResultForTaskList(List<TaskInstance> taskInstanceList) throws IOException {
for (TaskInstance taskInstance : taskInstanceList) {
if (TaskType.DEPENDENT.getDesc().equalsIgnoreCase(taskInstance.getTaskType())) {
Result<String> logResult = loggerService.queryLog(
taskInstance.getId(), Constants.LOG_QUERY_SKIP_LINE_NUMBER, Constants.LOG_QUERY_LIMIT);
if (logResult.getCode() == Status.SUCCESS.ordinal()) {
String log = logResult.getData();
Map<String, DependResult> resultMap = parseLogForDependentResult(log);
taskInstance.setDependentResult(JSONUtils.toJsonString(resultMap));
}
}
}
}
@Override
public Map<String, DependResult> parseLogForDependentResult(String log) throws IOException {
Map<String, DependResult> resultMap = new HashMap<>();
if (StringUtils.isEmpty(log)) {
return resultMap;
}
BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes(
StandardCharsets.UTF_8)), StandardCharsets.UTF_8));
String line;
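// a dependent-result line is expected to look like "<prefix>:||<dependentKey>,<DependResult>"
// (format inferred from the splits below); anything that does not match is skipped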
while ((line = br.readLine()) != null) {
if (line.contains(DEPENDENT_SPLIT)) {
String[] tmpStringArray = line.split(":\\|\\|");
if (tmpStringArray.length != 2) {
continue;
}
String dependResultString = tmpStringArray[1];
String[] dependStringArray = dependResultString.split(",");
if (dependStringArray.length != 2) {
continue;
}
String key = dependStringArray[0].trim();
DependResult dependResult = DependResult.valueOf(dependStringArray[1].trim());
resultMap.put(key, dependResult);
}
}
return resultMap;
}
/**
* query sub process instance detail info by task id
*
* @param loginUser login user
* @param projectCode project code
* @param taskId task id
* @return sub process instance detail
*/
@Override
public Map<String, Object> querySubProcessInstanceByTaskId(User loginUser, long projectCode, Integer taskId) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
TaskInstance taskInstance = processService.findTaskInstanceById(taskId);
if (taskInstance == null) {
putMsg(result, Status.TASK_INSTANCE_NOT_EXISTS, taskId);
return result;
}
TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskInstance.getTaskCode());
if (taskDefinition != null && projectCode != taskDefinition.getProjectCode()) {
putMsg(result, Status.TASK_INSTANCE_NOT_EXISTS, taskId);
return result;
}
if (!taskInstance.isSubProcess()) {
putMsg(result, Status.TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE, taskInstance.getName());
return result;
}
ProcessInstance subWorkflowInstance = processService.findSubProcessInstance(
taskInstance.getProcessInstanceId(), taskInstance.getId());
if (subWorkflowInstance == null) {
putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST, taskId);
return result;
}
Map<String, Object> dataMap = new HashMap<>();
dataMap.put(Constants.SUBPROCESS_INSTANCE_ID, subWorkflowInstance.getId());
result.put(DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* update process instance
*
* @param loginUser login user
* @param projectCode project code
* @param taskRelationJson process task relation json
* @param taskDefinitionJson taskDefinitionJson
* @param processInstanceId process instance id
* @param scheduleTime schedule time
* @param syncDefine sync define
* @param globalParams global params
* @param locations locations for nodes
* @param timeout timeout
* @param tenantCode tenantCode
* @return update result code
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> updateProcessInstance(User loginUser, long projectCode, Integer processInstanceId, String taskRelationJson,
String taskDefinitionJson, String scheduleTime, Boolean syncDefine, String globalParams,
String locations, int timeout, String tenantCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
//check process instance exists
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId);
if (processInstance == null) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
}
//check process instance exists in project
ProcessDefinition processDefinition0 = processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode());
if (processDefinition0 != null && projectCode != processDefinition0.getProjectCode()) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
}
//check process instance status
if (!processInstance.getState().typeIsFinished()) {
putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR,
processInstance.getName(), processInstance.getState().toString(), "update");
return result;
}
setProcessInstance(processInstance, tenantCode, scheduleTime, globalParams, timeout);
List<TaskDefinitionLog> taskDefinitionLogs = JSONUtils.toList(taskDefinitionJson, TaskDefinitionLog.class);
if (taskDefinitionLogs.isEmpty()) {
putMsg(result, Status.DATA_IS_NOT_VALID, taskDefinitionJson);
return result;
}
for (TaskDefinitionLog taskDefinitionLog : taskDefinitionLogs) {
if (!CheckUtils.checkTaskDefinitionParameters(taskDefinitionLog)) {
putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinitionLog.getName());
return result;
}
}
int saveTaskResult = processService.saveTaskDefine(loginUser, projectCode, taskDefinitionLogs, syncDefine);
if (saveTaskResult == Constants.DEFINITION_FAILURE) {
putMsg(result, Status.UPDATE_TASK_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR);
}
ProcessDefinition processDefinition = processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode());
List<ProcessTaskRelationLog> taskRelationList = JSONUtils.toList(taskRelationJson, ProcessTaskRelationLog.class);
//check workflow json is valid
result = processDefinitionService.checkProcessNodeList(taskRelationJson);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
int tenantId = -1;
if (!Constants.DEFAULT.equals(tenantCode)) {
Tenant tenant = tenantMapper.queryByTenantCode(tenantCode);
if (tenant == null) {
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
tenantId = tenant.getId();
}
processDefinition.set(projectCode, processDefinition.getName(), processDefinition.getDescription(), globalParams, locations, timeout, tenantId);
processDefinition.setUpdateTime(new Date());
int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, syncDefine, Boolean.FALSE);
if (insertVersion == 0) {
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR);
}
int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(),
processDefinition.getCode(), insertVersion, taskRelationList, taskDefinitionLogs, syncDefine);
if (insertResult == Constants.EXIT_CODE_SUCCESS) {
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefinition);
} else {
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR);
}
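// point the instance at the newly saved definition version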
processInstance.setProcessDefinitionVersion(insertVersion);
int update = processService.updateProcessInstance(processInstance);
if (update == 0) {
putMsg(result, Status.UPDATE_PROCESS_INSTANCE_ERROR);
throw new ServiceException(Status.UPDATE_PROCESS_INSTANCE_ERROR);
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* update process instance attributes
*/
private void setProcessInstance(ProcessInstance processInstance, String tenantCode, String scheduleTime, String globalParams, int timeout) {
Date schedule = processInstance.getScheduleTime();
if (scheduleTime != null) {
schedule = DateUtils.getScheduleDate(scheduleTime);
}
processInstance.setScheduleTime(schedule);
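// re-cure global params so that date placeholders are recalculated against the (possibly updated) schedule time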
List<Property> globalParamList = JSONUtils.toList(globalParams, Property.class);
Map<String, String> globalParamMap = globalParamList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue));
globalParams = ParameterUtils.curingGlobalParams(globalParamMap, globalParamList, processInstance.getCmdTypeIfComplement(), schedule);
processInstance.setTimeout(timeout);
processInstance.setTenantCode(tenantCode);
processInstance.setGlobalParams(globalParams);
}
/**
* query parent process instance detail info by sub process instance id
*
* @param loginUser login user
* @param projectCode project code
* @param subId sub process id
* @return parent instance detail
*/
@Override
public Map<String, Object> queryParentInstanceBySubId(User loginUser, long projectCode, Integer subId) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessInstance subInstance = processService.findProcessInstanceDetailById(subId);
if (subInstance == null) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, subId);
return result;
}
if (subInstance.getIsSubProcess() == Flag.NO) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE, subInstance.getName());
return result;
}
ProcessInstance parentWorkflowInstance = processService.findParentProcessInstance(subId);
if (parentWorkflowInstance == null) {
putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST);
return result;
}
Map<String, Object> dataMap = new HashMap<>();
dataMap.put(Constants.PARENT_WORKFLOW_INSTANCE, parentWorkflowInstance.getId());
result.put(DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete a process instance by id and, at the same time, delete its task instances and their mapping relation data
*
* @param loginUser login user
* @param projectCode project code
* @param processInstanceId process instance id
* @return delete result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> deleteProcessInstanceById(User loginUser, long projectCode, Integer processInstanceId) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId);
if (null == processInstance) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, String.valueOf(processInstanceId));
return result;
}
//check process instance status
if (!processInstance.getState().typeIsFinished()) {
putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR,
processInstance.getName(), processInstance.getState().toString(), "delete");
return result;
}
ProcessDefinition processDefinition = processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode());
if (processDefinition != null && projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, String.valueOf(processInstanceId));
return result;
}
try {
processService.removeTaskLogFile(processInstanceId);
} catch (Exception ignore) {
// ignore
}
// delete database cascade
int delete = processService.deleteWorkProcessInstanceById(processInstanceId);
processService.deleteAllSubWorkProcessByParentId(processInstanceId);
processService.deleteWorkProcessMapByParentId(processInstanceId);
processService.deleteWorkTaskInstanceByProcessInstanceId(processInstanceId);
if (delete > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR);
throw new ServiceException(Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR);
}
return result;
}
/**
* view process instance variables
*
* @param projectCode project code
* @param processInstanceId process instance id
* @return variables data
*/
@Override
public Map<String, Object> viewVariables(long projectCode, Integer processInstanceId) {
Map<String, Object> result = new HashMap<>();
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);
if (processInstance == null) {
throw new RuntimeException("workflow instance is null");
}
ProcessDefinition processDefinition = processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode());
if (processDefinition != null && projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
}
Map<String, String> timeParams = BusinessTimeUtils
.getBusinessTime(processInstance.getCmdTypeIfComplement(),
processInstance.getScheduleTime());
String userDefinedParams = processInstance.getGlobalParams();
// global params
List<Property> globalParams = new ArrayList<>();
// global param string
String globalParamStr = ParameterUtils.convertParameterPlaceholders(JSONUtils.toJsonString(globalParams), timeParams);
globalParams = JSONUtils.toList(globalParamStr, Property.class);
for (Property property : globalParams) {
timeParams.put(property.getProp(), property.getValue());
}
if (userDefinedParams != null && userDefinedParams.length() > 0) {
globalParams = JSONUtils.toList(userDefinedParams, Property.class);
}
Map<String, Map<String, Object>> localUserDefParams = getLocalParams(processInstance, timeParams);
Map<String, Object> resultMap = new HashMap<>();
resultMap.put(GLOBAL_PARAMS, globalParams);
resultMap.put(LOCAL_PARAMS, localUserDefParams);
result.put(DATA_LIST, resultMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* get local params
*/
private Map<String, Map<String, Object>> getLocalParams(ProcessInstance processInstance, Map<String, String> timeParams) {
Map<String, Map<String, Object>> localUserDefParams = new HashMap<>();
List<TaskInstance> taskInstanceList = taskInstanceMapper.findValidTaskListByProcessId(processInstance.getId(), Flag.YES);
for (TaskInstance taskInstance : taskInstanceList) {
TaskDefinitionLog taskDefinitionLog = taskDefinitionLogMapper.queryByDefinitionCodeAndVersion(
taskInstance.getTaskCode(), taskInstance.getTaskDefinitionVersion());
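// localParams is stored as an embedded json array inside taskParams; resolve time placeholders before parsing it back into properties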
String localParams = JSONUtils.getNodeString(taskDefinitionLog.getTaskParams(), LOCAL_PARAMS);
if (!StringUtils.isEmpty(localParams)) {
localParams = ParameterUtils.convertParameterPlaceholders(localParams, timeParams);
List<Property> localParamsList = JSONUtils.toList(localParams, Property.class);
Map<String, Object> localParamsMap = new HashMap<>();
localParamsMap.put(TASK_TYPE, taskDefinitionLog.getTaskType());
localParamsMap.put(LOCAL_PARAMS_LIST, localParamsList);
if (CollectionUtils.isNotEmpty(localParamsList)) {
localUserDefParams.put(taskDefinitionLog.getName(), localParamsMap);
}
}
}
return localUserDefParams;
}
/**
* encapsulation gantt structure
*
* @param projectCode project code
* @param processInstanceId process instance id
* @return gantt tree data
* @throws Exception exception when json parse
*/
@Override
public Map<String, Object> viewGantt(long projectCode, Integer processInstanceId) throws Exception {
Map<String, Object> result = new HashMap<>();
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);
if (processInstance == null) {
throw new RuntimeException("workflow instance is null");
}
ProcessDefinition processDefinition = processDefinitionLogMapper.queryByDefinitionCodeAndVersion(
processInstance.getProcessDefinitionCode(),
processInstance.getProcessDefinitionVersion()
);
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
}
GanttDto ganttDto = new GanttDto();
DAG<String, TaskNode, TaskNodeRelation> dag = processService.genDagGraph(processDefinition);
//topological sort
List<String> nodeList = dag.topologicalSort();
ganttDto.setTaskNames(nodeList);
List<Task> taskList = new ArrayList<>();
for (String node : nodeList) {
TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndCode(processInstanceId, Long.parseLong(node));
if (taskInstance == null) {
continue;
}
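// running tasks have no end time yet; default missing timestamps to "now" so the gantt bar still gets a duration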
Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime();
Date endTime = taskInstance.getEndTime() == null ? new Date() : taskInstance.getEndTime();
Task task = new Task();
task.setTaskName(taskInstance.getName());
task.getStartDate().add(startTime.getTime());
task.getEndDate().add(endTime.getTime());
task.setIsoStart(startTime);
task.setIsoEnd(endTime);
task.setStatus(taskInstance.getState().toString());
task.setExecutionDate(taskInstance.getStartTime());
task.setDuration(DateUtils.format2Readable(endTime.getTime() - startTime.getTime()));
taskList.add(task);
}
ganttDto.setTasks(taskList);
result.put(DATA_LIST, ganttDto);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query process instance by processDefinitionCode and stateArray
*
* @param processDefinitionCode processDefinitionCode
* @param states states array
* @return process instance list
*/
@Override
public List<ProcessInstance> queryByProcessDefineCodeAndStatus(Long processDefinitionCode, int[] states) {
return processInstanceMapper.queryByProcessDefineCodeAndStatus(processDefinitionCode, states);
}
/**
* query process instance by processDefinitionCode
*
* @param processDefinitionCode processDefinitionCode
* @param size size
* @return process instance list
*/
@Override
public List<ProcessInstance> queryByProcessDefineCode(Long processDefinitionCode, int size) {
return processInstanceMapper.queryByProcessDefineCode(processDefinitionCode, size);
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,119 | [Bug] [API] Update ProcessInstance error and without reason output | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![](https://ftp.bmp.ovh/imgs/2022/01/eca77fe88da36409.jpg)
Updating the process instance fails, and no reason for the error is shown.
### What you expected to happen
Updating the process instance should succeed, or at least report the reason when it fails.
### How to reproduce
Create a new task on the process instance edit page and link it to another task.
### Version
2.0.2
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8119 | https://github.com/apache/dolphinscheduler/pull/8122 | 38b40e7ac49390164207104a4a788639b12696c4 | b5631d5281e84b72afa44a3d37631ffdbfd178ca | "2022-01-19T07:32:41Z" | java | "2022-01-21T01:32:38Z" | dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import static org.powermock.api.mockito.PowerMockito.mock;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.ProcessDefinitionServiceImpl;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ProcessExecutionTypeEnum;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.dao.entity.DagData;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.commons.lang.StringUtils;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import javax.servlet.http.HttpServletResponse;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import org.springframework.mock.web.MockMultipartFile;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.google.common.collect.Lists;
/**
* process definition service test
*/
@RunWith(MockitoJUnitRunner.class)
public class ProcessDefinitionServiceTest {
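// the relation json below wires a two-task chain: 0 -> 123456789 -> 123451234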
private static final String taskRelationJson = "[{\"name\":\"\",\"preTaskCode\":0,\"preTaskVersion\":0,\"postTaskCode\":123456789,"
+ "\"postTaskVersion\":1,\"conditionType\":0,\"conditionParams\":\"{}\"},{\"name\":\"\",\"preTaskCode\":123456789,"
+ "\"preTaskVersion\":1,\"postTaskCode\":123451234,\"postTaskVersion\":1,\"conditionType\":0,\"conditionParams\":\"{}\"}]";
@InjectMocks
private ProcessDefinitionServiceImpl processDefinitionService;
@Mock
private ProcessDefinitionMapper processDefineMapper;
@Mock
private ProcessTaskRelationMapper processTaskRelationMapper;
@Mock
private ProjectMapper projectMapper;
@Mock
private ProjectServiceImpl projectService;
@Mock
private ScheduleMapper scheduleMapper;
@Mock
private ProcessService processService;
@Mock
private ProcessInstanceService processInstanceService;
@Mock
private TaskInstanceMapper taskInstanceMapper;
@Mock
private TenantMapper tenantMapper;
@Mock
private DataSourceMapper dataSourceMapper;
@Test
public void testQueryProcessDefinitionList() {
long projectCode = 1L;
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode));
Project project = getProject(projectCode);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
//project not found
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Map<String, Object> map = processDefinitionService.queryProcessDefinitionList(loginUser, projectCode);
Assert.assertEquals(Status.PROJECT_NOT_FOUND, map.get(Constants.STATUS));
//project check auth success
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
List<ProcessDefinition> resourceList = new ArrayList<>();
resourceList.add(getProcessDefinition());
Mockito.when(processDefineMapper.queryAllDefinitionList(project.getCode())).thenReturn(resourceList);
Map<String, Object> checkSuccessRes = processDefinitionService.queryProcessDefinitionList(loginUser, projectCode);
Assert.assertEquals(Status.SUCCESS, checkSuccessRes.get(Constants.STATUS));
}
@Test
@SuppressWarnings("unchecked")
public void testQueryProcessDefinitionListPaging() {
long projectCode = 1L;
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode));
Project project = getProject(projectCode);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
//project not found
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Result map = processDefinitionService.queryProcessDefinitionListPaging(loginUser, projectCode, "", 1, 5, 0);
Assert.assertEquals(Status.PROJECT_NOT_FOUND.getCode(), (int) map.getCode());
putMsg(result, Status.SUCCESS, projectCode);
loginUser.setId(1);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Page<ProcessDefinition> page = new Page<>(1, 10);
page.setTotal(30);
Mockito.when(processDefineMapper.queryDefineListPaging(
Mockito.any(IPage.class)
, Mockito.eq("")
, Mockito.eq(loginUser.getId())
, Mockito.eq(project.getCode())
, Mockito.anyBoolean())).thenReturn(page);
Result map1 = processDefinitionService.queryProcessDefinitionListPaging(
loginUser, 1L, "", 1, 10, loginUser.getId());
Assert.assertEquals(Status.SUCCESS.getMsg(), map1.getMsg());
}
@Test
public void testQueryProcessDefinitionByCode() {
long projectCode = 1L;
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode));
Project project = getProject(projectCode);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Tenant tenant = new Tenant();
tenant.setId(1);
tenant.setTenantCode("root");
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
//project check auth fail
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Map<String, Object> map = processDefinitionService.queryProcessDefinitionByCode(loginUser, 1L, 1L);
Assert.assertEquals(Status.PROJECT_NOT_FOUND, map.get(Constants.STATUS));
//project check auth success, instance not exist
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
DagData dagData = new DagData(getProcessDefinition(), null, null);
Mockito.when(processService.genDagData(Mockito.any())).thenReturn(dagData);
Map<String, Object> instanceNotExistRes = processDefinitionService.queryProcessDefinitionByCode(loginUser, projectCode, 1L);
Assert.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST, instanceNotExistRes.get(Constants.STATUS));
//instance exit
Mockito.when(processDefineMapper.queryByCode(46L)).thenReturn(getProcessDefinition());
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Mockito.when(tenantMapper.queryById(1)).thenReturn(tenant);
Map<String, Object> successRes = processDefinitionService.queryProcessDefinitionByCode(loginUser, projectCode, 46L);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void testQueryProcessDefinitionByName() {
long projectCode = 1L;
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode));
Project project = getProject(projectCode);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
//project check auth fail
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Map<String, Object> map = processDefinitionService.queryProcessDefinitionByName(loginUser, projectCode, "test_def");
Assert.assertEquals(Status.PROJECT_NOT_FOUND, map.get(Constants.STATUS));
//project check auth success, instance not exist
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Mockito.when(processDefineMapper.queryByDefineName(project.getCode(), "test_def")).thenReturn(null);
Map<String, Object> instanceNotExistRes = processDefinitionService.queryProcessDefinitionByName(loginUser, projectCode, "test_def");
Assert.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST, instanceNotExistRes.get(Constants.STATUS));
//instance exit
Mockito.when(processDefineMapper.queryByDefineName(project.getCode(), "test")).thenReturn(getProcessDefinition());
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Map<String, Object> successRes = processDefinitionService.queryProcessDefinitionByName(loginUser, projectCode, "test");
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void testBatchCopyProcessDefinition() {
long projectCode = 1L;
Project project = getProject(projectCode);
User loginUser = new User();
loginUser.setId(1);
loginUser.setUserType(UserType.GENERAL_USER);
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode));
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
// copy project definition ids empty test
Map<String, Object> map = processDefinitionService.batchCopyProcessDefinition(loginUser, projectCode, StringUtils.EMPTY, 2L);
Assert.assertEquals(Status.PROCESS_DEFINITION_CODES_IS_EMPTY, map.get(Constants.STATUS));
// project check auth fail
putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Map<String, Object> map1 = processDefinitionService.batchCopyProcessDefinition(
loginUser, projectCode, String.valueOf(project.getId()), 2L);
Assert.assertEquals(Status.PROJECT_NOT_FOUND, map1.get(Constants.STATUS));
// project check auth success, target project name not equal project name, check auth target project fail
projectCode = 2L;
Project project1 = getProject(projectCode);
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project1);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
putMsg(result, Status.SUCCESS, projectCode);
ProcessDefinition definition = getProcessDefinition();
List<ProcessDefinition> processDefinitionList = new ArrayList<>();
processDefinitionList.add(definition);
Set<Long> definitionCodes = Arrays.stream("46".split(Constants.COMMA)).map(Long::parseLong).collect(Collectors.toSet());
Mockito.when(processDefineMapper.queryByCodes(definitionCodes)).thenReturn(processDefinitionList);
Mockito.when(processService.saveProcessDefine(loginUser, definition, Boolean.TRUE, Boolean.TRUE)).thenReturn(2);
Map<String, Object> map3 = processDefinitionService.batchCopyProcessDefinition(
loginUser, projectCode, "46", 1L);
Assert.assertEquals(Status.SUCCESS, map3.get(Constants.STATUS));
}
@Test
public void testBatchMoveProcessDefinition() {
long projectCode = 1L;
Project project1 = getProject(projectCode);
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project1);
long projectCode2 = 2L;
Project project2 = getProject(projectCode2);
Mockito.when(projectMapper.queryByCode(projectCode2)).thenReturn(project2);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project1, projectCode)).thenReturn(result);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project2, projectCode2)).thenReturn(result);
ProcessDefinition definition = getProcessDefinition();
definition.setVersion(1);
List<ProcessDefinition> processDefinitionList = new ArrayList<>();
processDefinitionList.add(definition);
Set<Long> definitionCodes = Arrays.stream("46".split(Constants.COMMA)).map(Long::parseLong).collect(Collectors.toSet());
Mockito.when(processDefineMapper.queryByCodes(definitionCodes)).thenReturn(processDefinitionList);
Mockito.when(processService.saveProcessDefine(loginUser, definition, Boolean.TRUE, Boolean.TRUE)).thenReturn(2);
Mockito.when(processTaskRelationMapper.queryByProcessCode(projectCode, 46L)).thenReturn(getProcessTaskRelation(projectCode));
putMsg(result, Status.SUCCESS);
Map<String, Object> successRes = processDefinitionService.batchMoveProcessDefinition(
loginUser, projectCode, "46", projectCode2);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void deleteProcessDefinitionByCodeTest() {
long projectCode = 1L;
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode));
Project project = getProject(projectCode);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
//project check auth fail
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Map<String, Object> map = processDefinitionService.deleteProcessDefinitionByCode(loginUser, projectCode, 6L);
Assert.assertEquals(Status.PROJECT_NOT_FOUND, map.get(Constants.STATUS));
//project check auth success, instance not exist
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Mockito.when(processDefineMapper.queryByCode(1L)).thenReturn(null);
Map<String, Object> instanceNotExistRes = processDefinitionService.deleteProcessDefinitionByCode(loginUser, projectCode, 1L);
Assert.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST, instanceNotExistRes.get(Constants.STATUS));
ProcessDefinition processDefinition = getProcessDefinition();
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
//user no auth
loginUser.setUserType(UserType.GENERAL_USER);
Mockito.when(processDefineMapper.queryByCode(46L)).thenReturn(processDefinition);
Map<String, Object> userNoAuthRes = processDefinitionService.deleteProcessDefinitionByCode(loginUser, projectCode, 46L);
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, userNoAuthRes.get(Constants.STATUS));
//process definition online
loginUser.setUserType(UserType.ADMIN_USER);
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
processDefinition.setReleaseState(ReleaseState.ONLINE);
Mockito.when(processDefineMapper.queryByCode(46L)).thenReturn(processDefinition);
Map<String, Object> dfOnlineRes = processDefinitionService.deleteProcessDefinitionByCode(loginUser, projectCode, 46L);
Assert.assertEquals(Status.PROCESS_DEFINE_STATE_ONLINE, dfOnlineRes.get(Constants.STATUS));
//scheduler list elements > 1
processDefinition.setReleaseState(ReleaseState.OFFLINE);
Mockito.when(processDefineMapper.queryByCode(46L)).thenReturn(processDefinition);
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Mockito.when(scheduleMapper.queryByProcessDefinitionCode(46L)).thenReturn(getSchedule());
Mockito.when(scheduleMapper.deleteById(46)).thenReturn(1);
Mockito.when(processDefineMapper.deleteById(processDefinition.getId())).thenReturn(1);
Mockito.when(processTaskRelationMapper.deleteByCode(project.getCode(), processDefinition.getCode())).thenReturn(1);
Map<String, Object> schedulerGreaterThanOneRes = processDefinitionService.deleteProcessDefinitionByCode(loginUser, projectCode, 46L);
Assert.assertEquals(Status.SUCCESS, schedulerGreaterThanOneRes.get(Constants.STATUS));
//scheduler online
Schedule schedule = getSchedule();
schedule.setReleaseState(ReleaseState.ONLINE);
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Mockito.when(scheduleMapper.queryByProcessDefinitionCode(46L)).thenReturn(schedule);
Map<String, Object> schedulerOnlineRes = processDefinitionService.deleteProcessDefinitionByCode(loginUser, projectCode, 46L);
Assert.assertEquals(Status.SCHEDULE_CRON_STATE_ONLINE, schedulerOnlineRes.get(Constants.STATUS));
//delete success
schedule.setReleaseState(ReleaseState.OFFLINE);
Mockito.when(processDefineMapper.deleteById(46)).thenReturn(1);
Mockito.when(scheduleMapper.deleteById(schedule.getId())).thenReturn(1);
Mockito.when(processTaskRelationMapper.deleteByCode(project.getCode(), processDefinition.getCode())).thenReturn(1);
Mockito.when(scheduleMapper.queryByProcessDefinitionCode(46L)).thenReturn(getSchedule());
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Map<String, Object> deleteSuccess = processDefinitionService.deleteProcessDefinitionByCode(loginUser, projectCode, 46L);
Assert.assertEquals(Status.SUCCESS, deleteSuccess.get(Constants.STATUS));
}
@Test
@Ignore
public void testReleaseProcessDefinition() {
long projectCode = 1L;
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode));
Project project = getProject(projectCode);
User loginUser = new User();
loginUser.setId(1);
loginUser.setUserType(UserType.GENERAL_USER);
//project check auth fail
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Map<String, Object> map = processDefinitionService.releaseProcessDefinition(loginUser, projectCode,
6, ReleaseState.OFFLINE);
Assert.assertEquals(Status.PROJECT_NOT_FOUND, map.get(Constants.STATUS));
// project check auth success, process definition online
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(processDefineMapper.queryByCode(46L)).thenReturn(getProcessDefinition());
List<ProcessTaskRelation> processTaskRelationList = new ArrayList<>();
ProcessTaskRelation processTaskRelation = new ProcessTaskRelation();
processTaskRelation.setProjectCode(projectCode);
processTaskRelation.setProcessDefinitionCode(46L);
processTaskRelation.setPostTaskCode(123L);
processTaskRelationList.add(processTaskRelation);
Mockito.when(processService.findRelationByCode(46L, 1)).thenReturn(processTaskRelationList);
Map<String, Object> onlineRes = processDefinitionService.releaseProcessDefinition(
loginUser, projectCode, 46, ReleaseState.ONLINE);
Assert.assertEquals(Status.SUCCESS, onlineRes.get(Constants.STATUS));
// project check auth success, process definition online with resources
Map<String, Object> onlineWithResourceRes = processDefinitionService.releaseProcessDefinition(
loginUser, projectCode, 46, ReleaseState.ONLINE);
Assert.assertEquals(Status.SUCCESS, onlineWithResourceRes.get(Constants.STATUS));
// release error code
Map<String, Object> failRes = processDefinitionService.releaseProcessDefinition(
loginUser, projectCode, 46, ReleaseState.getEnum(2));
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, failRes.get(Constants.STATUS));
}
@Test
public void testVerifyProcessDefinitionName() {
long projectCode = 1L;
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode));
Project project = getProject(projectCode);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
//project check auth fail
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Map<String, Object> map = processDefinitionService.verifyProcessDefinitionName(loginUser,
projectCode, "test_pdf");
Assert.assertEquals(Status.PROJECT_NOT_FOUND, map.get(Constants.STATUS));
//project check auth success, process not exist
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(processDefineMapper.verifyByDefineName(project.getCode(), "test_pdf")).thenReturn(null);
Map<String, Object> processNotExistRes = processDefinitionService.verifyProcessDefinitionName(loginUser,
projectCode, "test_pdf");
Assert.assertEquals(Status.SUCCESS, processNotExistRes.get(Constants.STATUS));
//process exist
Mockito.when(processDefineMapper.verifyByDefineName(project.getCode(), "test_pdf")).thenReturn(getProcessDefinition());
Map<String, Object> processExistRes = processDefinitionService.verifyProcessDefinitionName(loginUser,
projectCode, "test_pdf");
Assert.assertEquals(Status.PROCESS_DEFINITION_NAME_EXIST, processExistRes.get(Constants.STATUS));
}
@Test
public void testCheckProcessNodeList() {
Map<String, Object> dataNotValidRes = processDefinitionService.checkProcessNodeList(null);
Assert.assertEquals(Status.DATA_IS_NOT_VALID, dataNotValidRes.get(Constants.STATUS));
Map<String, Object> taskEmptyRes = processDefinitionService.checkProcessNodeList(taskRelationJson);
Assert.assertEquals(Status.PROCESS_DAG_IS_EMPTY, taskEmptyRes.get(Constants.STATUS));
}
@Test
public void testGetTaskNodeListByDefinitionCode() {
long projectCode = 1L;
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode));
Project project = getProject(projectCode);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
//project check auth fail
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
//process definition not exist
Mockito.when(processDefineMapper.queryByCode(46L)).thenReturn(null);
Map<String, Object> processDefinitionNullRes = processDefinitionService.getTaskNodeListByDefinitionCode(loginUser, projectCode, 46L);
Assert.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionNullRes.get(Constants.STATUS));
//success
ProcessDefinition processDefinition = getProcessDefinition();
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(processService.genDagData(Mockito.any())).thenReturn(new DagData(processDefinition, null, null));
Mockito.when(processDefineMapper.queryByCode(46L)).thenReturn(processDefinition);
Map<String, Object> dataNotValidRes = processDefinitionService.getTaskNodeListByDefinitionCode(loginUser, projectCode, 46L);
Assert.assertEquals(Status.SUCCESS, dataNotValidRes.get(Constants.STATUS));
}
@Test
public void testGetTaskNodeListByDefinitionCodes() {
long projectCode = 1L;
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode));
Project project = getProject(projectCode);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
//project check auth fail
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
//process definition not exist
String defineCodes = "46";
Set<Long> defineCodeSet = Lists.newArrayList(defineCodes.split(Constants.COMMA)).stream().map(Long::parseLong).collect(Collectors.toSet());
Mockito.when(processDefineMapper.queryByCodes(defineCodeSet)).thenReturn(null);
Map<String, Object> processNotExistRes = processDefinitionService.getNodeListMapByDefinitionCodes(loginUser, projectCode, defineCodes);
Assert.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST, processNotExistRes.get(Constants.STATUS));
putMsg(result, Status.SUCCESS, projectCode);
ProcessDefinition processDefinition = getProcessDefinition();
List<ProcessDefinition> processDefinitionList = new ArrayList<>();
processDefinitionList.add(processDefinition);
Mockito.when(processDefineMapper.queryByCodes(defineCodeSet)).thenReturn(processDefinitionList);
Mockito.when(processService.genDagData(Mockito.any())).thenReturn(new DagData(processDefinition, null, null));
Project project1 = getProject(projectCode);
List<Project> projects = new ArrayList<>();
projects.add(project1);
Mockito.when(projectMapper.queryProjectCreatedAndAuthorizedByUserId(loginUser.getId())).thenReturn(projects);
Map<String, Object> successRes = processDefinitionService.getNodeListMapByDefinitionCodes(loginUser, projectCode, defineCodes);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void testQueryAllProcessDefinitionByProjectCode() {
User loginUser = new User();
loginUser.setId(1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>();
long projectCode = 2L;
Project project = getProject(projectCode);
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project);
putMsg(result, Status.SUCCESS, projectCode);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
ProcessDefinition processDefinition = getProcessDefinition();
List<ProcessDefinition> processDefinitionList = new ArrayList<>();
processDefinitionList.add(processDefinition);
Mockito.when(processDefineMapper.queryAllDefinitionList(projectCode)).thenReturn(processDefinitionList);
Map<String, Object> successRes = processDefinitionService.queryAllProcessDefinitionByProjectCode(loginUser, projectCode);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void testViewTree() {
//process definition not exist
ProcessDefinition processDefinition = getProcessDefinition();
Map<String, Object> processDefinitionNullRes = processDefinitionService.viewTree(processDefinition.getProjectCode(), 46, 10);
Assert.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionNullRes.get(Constants.STATUS));
//task instance not exist
Mockito.when(processDefineMapper.queryByCode(46L)).thenReturn(processDefinition);
Mockito.when(processService.genDagGraph(processDefinition)).thenReturn(new DAG<>());
Map<String, Object> taskNullRes = processDefinitionService.viewTree(processDefinition.getProjectCode(), 46, 10);
Assert.assertEquals(Status.SUCCESS, taskNullRes.get(Constants.STATUS));
//task instance exist
Map<String, Object> taskNotNullRes = processDefinitionService.viewTree(processDefinition.getProjectCode(), 46, 10);
Assert.assertEquals(Status.SUCCESS, taskNotNullRes.get(Constants.STATUS));
}
@Test
public void testSubProcessViewTree() {
ProcessDefinition processDefinition = getProcessDefinition();
Mockito.when(processDefineMapper.queryByCode(46L)).thenReturn(processDefinition);
Mockito.when(processService.genDagGraph(processDefinition)).thenReturn(new DAG<>());
Map<String, Object> taskNotNullRes = processDefinitionService.viewTree(processDefinition.getProjectCode(), 46, 10);
Assert.assertEquals(Status.SUCCESS, taskNotNullRes.get(Constants.STATUS));
}
@Test
public void testUpdateProcessDefinition() {
User loginUser = new User();
loginUser.setId(1);
loginUser.setUserType(UserType.ADMIN_USER);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
long projectCode = 1L;
Project project = getProject(projectCode);
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode));
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Map<String, Object> updateResult = processDefinitionService.updateProcessDefinition(loginUser, projectCode, "test", 1,
"", "", "", 0, "root", null, null, ProcessExecutionTypeEnum.PARALLEL);
Assert.assertEquals(Status.DATA_IS_NOT_VALID, updateResult.get(Constants.STATUS));
}
@Test
public void testBatchExportProcessDefinitionByCodes() {
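// Covers three paths: a null login user, a project-auth failure, and a successful export through a mocked HttpServletResponse.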
processDefinitionService.batchExportProcessDefinitionByCodes(null, 1L, null, null);
User loginUser = new User();
loginUser.setId(1);
loginUser.setUserType(UserType.ADMIN_USER);
long projectCode = 1L;
Project project = getProject(projectCode);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUND);
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode));
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
processDefinitionService.batchExportProcessDefinitionByCodes(
loginUser, projectCode, "1", null);
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setId(1);
Map<String, Object> checkResult = new HashMap<>();
checkResult.put(Constants.STATUS, Status.SUCCESS);
Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(checkResult);
HttpServletResponse response = mock(HttpServletResponse.class);
DagData dagData = new DagData(getProcessDefinition(), null, null);
Mockito.when(processService.genDagData(Mockito.any())).thenReturn(dagData);
processDefinitionService.batchExportProcessDefinitionByCodes(loginUser, projectCode, "1", response);
Assert.assertNotNull(processDefinitionService.exportProcessDagData(processDefinition));
}
@Test
public void testImportSqlProcessDefinition() throws Exception {
int userId = 10;
User loginUser = Mockito.mock(User.class);
Mockito.when(loginUser.getId()).thenReturn(userId);
Mockito.when(loginUser.getTenantId()).thenReturn(2);
Mockito.when(loginUser.getUserType()).thenReturn(UserType.GENERAL_USER);
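// Build an in-memory zip that mimics an import package: the leading '--' comments in each .sql entry carry task metadata (name, upstream dependency, datasource) for the import parser.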
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
ZipOutputStream outputStream = new ZipOutputStream(byteArrayOutputStream);
outputStream.putNextEntry(new ZipEntry("import_sql/"));
outputStream.putNextEntry(new ZipEntry("import_sql/a.sql"));
outputStream.write("-- upstream: start_auto_dag\n-- datasource: mysql_1\nselect 1;".getBytes(StandardCharsets.UTF_8));
outputStream.putNextEntry(new ZipEntry("import_sql/b.sql"));
outputStream.write("-- name: start_auto_dag\n-- datasource: mysql_1\nselect 1;".getBytes(StandardCharsets.UTF_8));
outputStream.close();
MockMultipartFile mockMultipartFile = new MockMultipartFile("import_sql.zip", byteArrayOutputStream.toByteArray());
DataSource dataSource = Mockito.mock(DataSource.class);
Mockito.when(dataSource.getId()).thenReturn(1);
Mockito.when(dataSource.getType()).thenReturn(DbType.MYSQL);
Mockito.when(dataSourceMapper.queryDataSourceByNameAndUserId(userId, "mysql_1")).thenReturn(dataSource);
long projectCode = 1001;
Mockito.when(processService.saveTaskDefine(Mockito.same(loginUser), Mockito.eq(projectCode), Mockito.notNull(), Mockito.anyBoolean())).thenReturn(2);
Mockito.when(processService.saveProcessDefine(Mockito.same(loginUser), Mockito.notNull(), Mockito.notNull(), Mockito.anyBoolean())).thenReturn(1);
Mockito.when(processService.saveTaskRelation(Mockito.same(loginUser), Mockito.eq(projectCode), Mockito.anyLong(),
Mockito.eq(1), Mockito.notNull(), Mockito.notNull(), Mockito.anyBoolean())).thenReturn(0);
Map<String, Object> result = processDefinitionService.importSqlProcessDefinition(loginUser, projectCode, mockMultipartFile);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
/**
* get mock processDefinition
*
* @return ProcessDefinition
*/
private ProcessDefinition getProcessDefinition() {
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setId(46);
processDefinition.setProjectCode(1L);
processDefinition.setName("test_pdf");
processDefinition.setTenantId(1);
processDefinition.setDescription("");
processDefinition.setCode(46L);
return processDefinition;
}
/**
* get mock Project
*
* @param projectCode projectCode
* @return Project
*/
private Project getProject(long projectCode) {
Project project = new Project();
project.setCode(projectCode);
project.setId(1);
project.setName("test");
project.setUserId(1);
return project;
}
private List<ProcessTaskRelation> getProcessTaskRelation(long projectCode) {
List<ProcessTaskRelation> processTaskRelations = new ArrayList<>();
ProcessTaskRelation processTaskRelation = new ProcessTaskRelation();
processTaskRelation.setProjectCode(projectCode);
processTaskRelation.setProcessDefinitionCode(46L);
processTaskRelation.setProcessDefinitionVersion(1);
processTaskRelations.add(processTaskRelation);
return processTaskRelations;
}
/**
* get mock schedule
*
* @return schedule
*/
private Schedule getSchedule() {
Date date = new Date();
Schedule schedule = new Schedule();
schedule.setId(46);
schedule.setProcessDefinitionCode(1);
schedule.setStartTime(date);
schedule.setEndTime(date);
schedule.setCrontab("0 0 5 * * ? *");
schedule.setFailureStrategy(FailureStrategy.END);
schedule.setUserId(1);
schedule.setReleaseState(ReleaseState.OFFLINE);
schedule.setProcessInstancePriority(Priority.MEDIUM);
schedule.setWarningType(WarningType.NONE);
schedule.setWarningGroupId(1);
schedule.setWorkerGroup(Constants.DEFAULT_WORKER_GROUP);
return schedule;
}
private void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
result.put(Constants.STATUS, status);
if (statusParams != null && statusParams.length > 0) {
result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams));
} else {
result.put(Constants.MSG, status.getMsg());
}
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,119 | [Bug] [API] Update ProcessInstance error and without reason output | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![](https://ftp.bmp.ovh/imgs/2022/01/eca77fe88da36409.jpg)
Updating a ProcessInstance fails, and no reason for the error is output.
### What you expected to happen
The ProcessInstance update should succeed, or at least the failure reason should be reported.
### How to reproduce
Create a new task on the process instance edit page and link it to another task.
### Version
2.0.2
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8119 | https://github.com/apache/dolphinscheduler/pull/8122 | 38b40e7ac49390164207104a4a788639b12696c4 | b5631d5281e84b72afa44a3d37631ffdbfd178ca | "2022-01-19T07:32:41Z" | java | "2022-01-21T01:32:38Z" | dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.LoggerServiceImpl;
import org.apache.dolphinscheduler.api.service.impl.ProcessInstanceServiceImpl;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.DependResult;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionLog;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.TaskDefinition;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.entity.WorkerGroup;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionLogMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* process instance service test
*/
@RunWith(MockitoJUnitRunner.Silent.class)
public class ProcessInstanceServiceTest {
@InjectMocks
ProcessInstanceServiceImpl processInstanceService;
@Mock
ProjectMapper projectMapper;
@Mock
ProjectServiceImpl projectService;
@Mock
ProcessService processService;
@Mock
ProcessInstanceMapper processInstanceMapper;
@Mock
ProcessDefinitionLogMapper processDefinitionLogMapper;
@Mock
ProcessDefinitionMapper processDefineMapper;
@Mock
ProcessDefinitionService processDefinitionService;
@Mock
TaskInstanceMapper taskInstanceMapper;
@Mock
LoggerServiceImpl loggerService;
@Mock
UsersService usersService;
@Mock
TenantMapper tenantMapper;
@Mock
TaskDefinitionMapper taskDefinitionMapper;
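// JSON fixtures used by the update tests below: shellJson describes the task relations (pre/post task codes) and taskJson defines two shell tasks.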
private String shellJson = "[{\"name\":\"\",\"preTaskCode\":0,\"preTaskVersion\":0,\"postTaskCode\":123456789,"
+ "\"postTaskVersion\":1,\"conditionType\":0,\"conditionParams\":\"{}\"},{\"name\":\"\",\"preTaskCode\":123456789,"
+ "\"preTaskVersion\":1,\"postTaskCode\":123451234,\"postTaskVersion\":1,\"conditionType\":0,\"conditionParams\":\"{}\"}]";
private String taskJson = "[{\"name\":\"shell1\",\"description\":\"\",\"taskType\":\"SHELL\",\"taskParams\":{\"resourceList\":[],"
+ "\"localParams\":[],\"rawScript\":\"echo 1\",\"conditionResult\":{\"successNode\":[\"\"],\"failedNode\":[\"\"]},\"dependence\":{}},"
+ "\"flag\":\"NORMAL\",\"taskPriority\":\"MEDIUM\",\"workerGroup\":\"default\",\"failRetryTimes\":\"0\",\"failRetryInterval\":\"1\","
+ "\"timeoutFlag\":\"CLOSE\",\"timeoutNotifyStrategy\":\"\",\"timeout\":null,\"delayTime\":\"0\"},{\"name\":\"shell2\",\"description\":\"\","
+ "\"taskType\":\"SHELL\",\"taskParams\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"echo 2\",\"conditionResult\":{\"successNode\""
+ ":[\"\"],\"failedNode\":[\"\"]},\"dependence\":{}},\"flag\":\"NORMAL\",\"taskPriority\":\"MEDIUM\",\"workerGroup\":\"default\","
+ "\"failRetryTimes\":\"0\",\"failRetryInterval\":\"1\",\"timeoutFlag\":\"CLOSE\",\"timeoutNotifyStrategy\":\"\",\"timeout\":null,\"delayTime\":\"0\"}]";
@Test
public void testQueryProcessInstanceList() {
long projectCode = 1L;
User loginUser = getAdminUser();
Project project = getProject(projectCode);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
//project auth fail
when(projectMapper.queryByCode(projectCode)).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Result projectAuthFailRes = processInstanceService.queryProcessInstanceList(loginUser, projectCode, 46, "2020-01-01 00:00:00",
"2020-01-02 00:00:00", "", "test_user", ExecutionStatus.SUBMITTED_SUCCESS,
"192.168.xx.xx", 1, 10);
Assert.assertEquals(Status.PROJECT_NOT_FOUND.getCode(), (int) projectAuthFailRes.getCode());
Date start = DateUtils.getScheduleDate("2020-01-01 00:00:00");
Date end = DateUtils.getScheduleDate("2020-01-02 00:00:00");
ProcessInstance processInstance = getProcessInstance();
List<ProcessInstance> processInstanceList = new ArrayList<>();
Page<ProcessInstance> pageReturn = new Page<>(1, 10);
processInstanceList.add(processInstance);
pageReturn.setRecords(processInstanceList);
// data parameter check
putMsg(result, Status.SUCCESS, projectCode);
when(projectMapper.queryByCode(projectCode)).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
when(processDefineMapper.selectById(Mockito.anyInt())).thenReturn(getProcessDefinition());
when(processInstanceMapper.queryProcessInstanceListPaging(Mockito.any(Page.class)
, Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(),
eq("192.168.xx.xx"), Mockito.any(), Mockito.any())).thenReturn(pageReturn);
Result dataParameterRes = processInstanceService.queryProcessInstanceList(loginUser, projectCode, 1, "20200101 00:00:00",
"20200102 00:00:00", "", loginUser.getUserName(), ExecutionStatus.SUBMITTED_SUCCESS,
"192.168.xx.xx", 1, 10);
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getCode(), (int) dataParameterRes.getCode());
//project auth success
putMsg(result, Status.SUCCESS, projectCode);
when(projectMapper.queryByCode(projectCode)).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
when(usersService.queryUser(loginUser.getId())).thenReturn(loginUser);
when(usersService.getUserIdByName(loginUser.getUserName())).thenReturn(loginUser.getId());
when(processInstanceMapper.queryProcessInstanceListPaging(Mockito.any(Page.class), eq(project.getCode()), eq(1L), eq(""), eq(-1), Mockito.any(),
eq("192.168.xx.xx"), eq(start), eq(end))).thenReturn(pageReturn);
when(usersService.queryUser(processInstance.getExecutorId())).thenReturn(loginUser);
Result successRes = processInstanceService.queryProcessInstanceList(loginUser, projectCode, 1, "2020-01-01 00:00:00",
"2020-01-02 00:00:00", "", loginUser.getUserName(), ExecutionStatus.SUBMITTED_SUCCESS,
"192.168.xx.xx", 1, 10);
Assert.assertEquals(Status.SUCCESS.getCode(), (int)successRes.getCode());
// data parameter empty
when(processInstanceMapper.queryProcessInstanceListPaging(Mockito.any(Page.class), eq(project.getCode()), eq(1L), eq(""), eq(-1), Mockito.any(),
eq("192.168.xx.xx"), eq(null), eq(null))).thenReturn(pageReturn);
successRes = processInstanceService.queryProcessInstanceList(loginUser, projectCode, 1, "",
"", "", loginUser.getUserName(), ExecutionStatus.SUBMITTED_SUCCESS,
"192.168.xx.xx", 1, 10);
Assert.assertEquals(Status.SUCCESS.getCode(), (int)successRes.getCode());
//executor null
when(usersService.queryUser(loginUser.getId())).thenReturn(null);
when(usersService.getUserIdByName(loginUser.getUserName())).thenReturn(-1);
Result executorExistRes = processInstanceService.queryProcessInstanceList(loginUser, projectCode, 1, "2020-01-01 00:00:00",
"2020-01-02 00:00:00", "", "admin", ExecutionStatus.SUBMITTED_SUCCESS,
"192.168.xx.xx", 1, 10);
Assert.assertEquals(Status.SUCCESS.getCode(), (int)executorExistRes.getCode());
//executor name empty
when(processInstanceMapper.queryProcessInstanceListPaging(Mockito.any(Page.class), eq(project.getCode()), eq(1L), eq(""), eq(0), Mockito.any(),
eq("192.168.xx.xx"), eq(start), eq(end))).thenReturn(pageReturn);
Result executorEmptyRes = processInstanceService.queryProcessInstanceList(loginUser, projectCode, 1, "2020-01-01 00:00:00",
"2020-01-02 00:00:00", "", "", ExecutionStatus.SUBMITTED_SUCCESS,
"192.168.xx.xx", 1, 10);
Assert.assertEquals(Status.SUCCESS.getCode(), (int)executorEmptyRes.getCode());
}
@Test
public void testQueryTopNLongestRunningProcessInstance() {
long projectCode = 1L;
User loginUser = getAdminUser();
Project project = getProject(projectCode);
Map<String, Object> result = new HashMap<>(5);
putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
int size = 10;
String startTime = "2020-01-01 00:00:00";
String endTime = "2020-08-02 00:00:00";
Date start = DateUtils.getScheduleDate(startTime);
Date end = DateUtils.getScheduleDate(endTime);
//project auth fail
when(projectMapper.queryByCode(projectCode)).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Map<String, Object> projectAuthFailRes = processInstanceService.queryTopNLongestRunningProcessInstance(loginUser, projectCode, size, startTime, endTime);
Assert.assertEquals(Status.PROJECT_NOT_FOUND, projectAuthFailRes.get(Constants.STATUS));
//project auth success
putMsg(result, Status.SUCCESS, projectCode);
ProcessInstance processInstance = getProcessInstance();
when(projectMapper.queryByCode(projectCode)).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
when(usersService.queryUser(loginUser.getId())).thenReturn(loginUser);
when(usersService.getUserIdByName(loginUser.getUserName())).thenReturn(loginUser.getId());
when(usersService.queryUser(processInstance.getExecutorId())).thenReturn(loginUser);
Map<String, Object> successRes = processInstanceService.queryTopNLongestRunningProcessInstance(loginUser, projectCode, size, startTime, endTime);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void testQueryProcessInstanceById() {
long projectCode = 1L;
User loginUser = getAdminUser();
Project project = getProject(projectCode);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
//project auth fail
when(projectMapper.queryByCode(projectCode)).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Map<String, Object> projectAuthFailRes = processInstanceService.queryProcessInstanceById(loginUser, projectCode, 1);
Assert.assertEquals(Status.PROJECT_NOT_FOUND, projectAuthFailRes.get(Constants.STATUS));
//project auth success
ProcessInstance processInstance = getProcessInstance();
putMsg(result, Status.SUCCESS, projectCode);
ProcessDefinition processDefinition = getProcessDefinition();
processDefinition.setProjectCode(projectCode);
when(projectMapper.queryByCode(projectCode)).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
when(processService.findProcessInstanceDetailById(processInstance.getId())).thenReturn(processInstance);
when(processService.findProcessDefinition(processInstance.getProcessDefinitionCode(),
processInstance.getProcessDefinitionVersion())).thenReturn(processDefinition);
Map<String, Object> successRes = processInstanceService.queryProcessInstanceById(loginUser, projectCode, 1);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
//worker group null
Map<String, Object> workerNullRes = processInstanceService.queryProcessInstanceById(loginUser, projectCode, 1);
Assert.assertEquals(Status.SUCCESS, workerNullRes.get(Constants.STATUS));
//worker group exist
WorkerGroup workerGroup = getWorkGroup();
Map<String, Object> workerExistRes = processInstanceService.queryProcessInstanceById(loginUser, projectCode, 1);
Assert.assertEquals(Status.SUCCESS, workerExistRes.get(Constants.STATUS));
}
@Test
public void testQueryTaskListByProcessId() throws IOException {
long projectCode = 1L;
User loginUser = getAdminUser();
Project project = getProject(projectCode);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
//project auth fail
when(projectMapper.queryByCode(projectCode)).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Map<String, Object> projectAuthFailRes = processInstanceService.queryTaskListByProcessId(loginUser, projectCode, 1);
Assert.assertEquals(Status.PROJECT_NOT_FOUND, projectAuthFailRes.get(Constants.STATUS));
//project auth success
putMsg(result, Status.SUCCESS, projectCode);
ProcessInstance processInstance = getProcessInstance();
processInstance.setState(ExecutionStatus.SUCCESS);
TaskInstance taskInstance = new TaskInstance();
taskInstance.setTaskType(TaskType.SHELL.getDesc());
List<TaskInstance> taskInstanceList = new ArrayList<>();
taskInstanceList.add(taskInstance);
Result res = new Result();
res.setCode(Status.SUCCESS.ordinal());
res.setData("xxx");
when(projectMapper.queryByCode(projectCode)).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
when(processService.findProcessInstanceDetailById(processInstance.getId())).thenReturn(processInstance);
when(processService.findValidTaskListByProcessId(processInstance.getId())).thenReturn(taskInstanceList);
when(loggerService.queryLog(taskInstance.getId(), 0, 4098)).thenReturn(res);
Map<String, Object> successRes = processInstanceService.queryTaskListByProcessId(loginUser, projectCode, 1);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void testParseLogForDependentResult() throws IOException {
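// The parser looks for lines containing "dependent item complete :||" and collects the trailing "<item>,<RESULT>" pairs; this sample log should yield exactly one entry.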
String logString = "[INFO] 2019-03-19 17:11:08.475 org.apache.dolphinscheduler.server.worker.log.TaskLogger:[172]"
+ " - [taskAppId=TASK_223_10739_452334] dependent item complete :|| 223-ALL-day-last1Day,SUCCESS\n"
+ "[INFO] 2019-03-19 17:11:08.476 org.apache.dolphinscheduler.server.worker.runner.TaskScheduleThread:[172]"
+ " - task : 223_10739_452334 exit status code : 0\n"
+ "[root@node2 current]# ";
Map<String, DependResult> resultMap =
processInstanceService.parseLogForDependentResult(logString);
Assert.assertEquals(1, resultMap.size());
}
@Test
public void testQuerySubProcessInstanceByTaskId() {
long projectCode = 1L;
User loginUser = getAdminUser();
Project project = getProject(projectCode);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
//project auth fail
when(projectMapper.queryByCode(projectCode)).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Map<String, Object> projectAuthFailRes = processInstanceService.querySubProcessInstanceByTaskId(loginUser, projectCode, 1);
Assert.assertEquals(Status.PROJECT_NOT_FOUND, projectAuthFailRes.get(Constants.STATUS));
//task null
putMsg(result, Status.SUCCESS, projectCode);
when(projectMapper.queryByCode(projectCode)).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
when(processService.findTaskInstanceById(1)).thenReturn(null);
Map<String, Object> taskNullRes = processInstanceService.querySubProcessInstanceByTaskId(loginUser, projectCode, 1);
Assert.assertEquals(Status.TASK_INSTANCE_NOT_EXISTS, taskNullRes.get(Constants.STATUS));
//task not sub process
TaskInstance taskInstance = getTaskInstance();
taskInstance.setTaskType(TaskType.HTTP.getDesc());
taskInstance.setProcessInstanceId(1);
putMsg(result, Status.SUCCESS, projectCode);
when(processService.findTaskInstanceById(1)).thenReturn(taskInstance);
TaskDefinition taskDefinition = new TaskDefinition();
taskDefinition.setProjectCode(projectCode);
when(taskDefinitionMapper.queryByCode(taskInstance.getTaskCode())).thenReturn(taskDefinition);
Map<String, Object> notSubprocessRes = processInstanceService.querySubProcessInstanceByTaskId(loginUser, projectCode, 1);
Assert.assertEquals(Status.TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE, notSubprocessRes.get(Constants.STATUS));
//sub process not exist
TaskInstance subTask = getTaskInstance();
subTask.setTaskType(TaskType.SUB_PROCESS.getDesc());
subTask.setProcessInstanceId(1);
putMsg(result, Status.SUCCESS, projectCode);
when(processService.findTaskInstanceById(subTask.getId())).thenReturn(subTask);
when(processService.findSubProcessInstance(subTask.getProcessInstanceId(), subTask.getId())).thenReturn(null);
Map<String, Object> subprocessNotExistRes = processInstanceService.querySubProcessInstanceByTaskId(loginUser, projectCode, 1);
Assert.assertEquals(Status.SUB_PROCESS_INSTANCE_NOT_EXIST, subprocessNotExistRes.get(Constants.STATUS));
//sub process exist
ProcessInstance processInstance = getProcessInstance();
putMsg(result, Status.SUCCESS, projectCode);
when(processService.findSubProcessInstance(taskInstance.getProcessInstanceId(), taskInstance.getId())).thenReturn(processInstance);
Map<String, Object> subprocessExistRes = processInstanceService.querySubProcessInstanceByTaskId(loginUser, projectCode, 1);
Assert.assertEquals(Status.SUCCESS, subprocessExistRes.get(Constants.STATUS));
}
@Test
public void testUpdateProcessInstance() {
long projectCode = 1L;
User loginUser = getAdminUser();
Project project = getProject(projectCode);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
//project auth fail
when(projectMapper.queryByCode(projectCode)).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Map<String, Object> projectAuthFailRes = processInstanceService.updateProcessInstance(loginUser, projectCode, 1,
shellJson, taskJson, "2020-02-21 00:00:00", true, "", "", 0, "");
Assert.assertEquals(Status.PROJECT_NOT_FOUND, projectAuthFailRes.get(Constants.STATUS));
//process instance null
putMsg(result, Status.SUCCESS, projectCode);
ProcessInstance processInstance = getProcessInstance();
when(projectMapper.queryByCode(projectCode)).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
when(processService.findProcessInstanceDetailById(1)).thenReturn(null);
Map<String, Object> processInstanceNullRes = processInstanceService.updateProcessInstance(loginUser, projectCode, 1,
shellJson, taskJson,"2020-02-21 00:00:00", true, "", "", 0, "");
Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceNullRes.get(Constants.STATUS));
//process instance not finish
when(processService.findProcessInstanceDetailById(1)).thenReturn(processInstance);
processInstance.setState(ExecutionStatus.RUNNING_EXECUTION);
putMsg(result, Status.SUCCESS, projectCode);
Map<String, Object> processInstanceNotFinishRes = processInstanceService.updateProcessInstance(loginUser, projectCode, 1,
shellJson, taskJson,"2020-02-21 00:00:00", true, "", "", 0, "");
Assert.assertEquals(Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, processInstanceNotFinishRes.get(Constants.STATUS));
//process instance finish
processInstance.setState(ExecutionStatus.SUCCESS);
processInstance.setTimeout(3000);
processInstance.setCommandType(CommandType.STOP);
processInstance.setProcessDefinitionCode(46L);
processInstance.setProcessDefinitionVersion(1);
ProcessDefinition processDefinition = getProcessDefinition();
processDefinition.setId(1);
processDefinition.setUserId(1);
processDefinition.setProjectCode(projectCode);
Tenant tenant = getTenant();
when(processDefineMapper.queryByCode(46L)).thenReturn(processDefinition);
when(tenantMapper.queryByTenantCode("root")).thenReturn(tenant);
when(processService.getTenantForProcess(Mockito.anyInt(), Mockito.anyInt())).thenReturn(tenant);
when(processService.updateProcessInstance(processInstance)).thenReturn(1);
when(processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.FALSE)).thenReturn(1);
when(processDefinitionService.checkProcessNodeList(shellJson)).thenReturn(result);
putMsg(result, Status.SUCCESS, projectCode);
Map<String, Object> processInstanceFinishRes = processInstanceService.updateProcessInstance(loginUser, projectCode, 1,
shellJson, taskJson,"2020-02-21 00:00:00", true, "", "", 0, "root");
Assert.assertEquals(Status.SUCCESS, processInstanceFinishRes.get(Constants.STATUS));
//success
when(processDefineMapper.queryByCode(46L)).thenReturn(processDefinition);
putMsg(result, Status.SUCCESS, projectCode);
when(processService.saveProcessDefine(loginUser, processDefinition, Boolean.FALSE, Boolean.FALSE)).thenReturn(1);
Map<String, Object> successRes = processInstanceService.updateProcessInstance(loginUser, projectCode, 1,
shellJson, taskJson,"2020-02-21 00:00:00", Boolean.FALSE, "", "", 0, "root");
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void testQueryParentInstanceBySubId() {
long projectCode = 1L;
User loginUser = getAdminUser();
Project project = getProject(projectCode);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
//project auth fail
when(projectMapper.queryByCode(projectCode)).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
Map<String, Object> projectAuthFailRes = processInstanceService.queryParentInstanceBySubId(loginUser, projectCode, 1);
Assert.assertEquals(Status.PROJECT_NOT_FOUND, projectAuthFailRes.get(Constants.STATUS));
//process instance null
putMsg(result, Status.SUCCESS, projectCode);
when(projectMapper.queryByCode(projectCode)).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
when(processService.findProcessInstanceDetailById(1)).thenReturn(null);
Map<String, Object> processInstanceNullRes = processInstanceService.queryParentInstanceBySubId(loginUser, projectCode, 1);
Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceNullRes.get(Constants.STATUS));
//not sub process
ProcessInstance processInstance = getProcessInstance();
processInstance.setIsSubProcess(Flag.NO);
putMsg(result, Status.SUCCESS, projectCode);
when(processService.findProcessInstanceDetailById(1)).thenReturn(processInstance);
Map<String, Object> notSubProcessRes = processInstanceService.queryParentInstanceBySubId(loginUser, projectCode, 1);
Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE, notSubProcessRes.get(Constants.STATUS));
//sub process
processInstance.setIsSubProcess(Flag.YES);
putMsg(result, Status.SUCCESS, projectCode);
when(processService.findParentProcessInstance(1)).thenReturn(null);
Map<String, Object> subProcessNullRes = processInstanceService.queryParentInstanceBySubId(loginUser, projectCode, 1);
Assert.assertEquals(Status.SUB_PROCESS_INSTANCE_NOT_EXIST, subProcessNullRes.get(Constants.STATUS));
//success
putMsg(result, Status.SUCCESS, projectCode);
when(processService.findParentProcessInstance(1)).thenReturn(processInstance);
Map<String, Object> successRes = processInstanceService.queryParentInstanceBySubId(loginUser, projectCode, 1);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void testDeleteProcessInstanceById() {
long projectCode = 1L;
User loginUser = getAdminUser();
Project project = getProject(projectCode);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
//process instance null
putMsg(result, Status.SUCCESS, projectCode);
when(projectMapper.queryByCode(projectCode)).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result);
when(processService.findProcessInstanceDetailById(1)).thenReturn(null);
}
@Test
public void testViewVariables() {
//process instance not null
ProcessInstance processInstance = getProcessInstance();
processInstance.setCommandType(CommandType.SCHEDULER);
processInstance.setScheduleTime(new Date());
processInstance.setGlobalParams("");
when(processInstanceMapper.queryDetailById(1)).thenReturn(processInstance);
Map<String, Object> successRes = processInstanceService.viewVariables(1L, 1);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void testViewGantt() throws Exception {
ProcessInstance processInstance = getProcessInstance();
TaskInstance taskInstance = getTaskInstance();
taskInstance.setState(ExecutionStatus.RUNNING_EXECUTION);
taskInstance.setStartTime(new Date());
when(processInstanceMapper.queryDetailById(1)).thenReturn(processInstance);
when(processDefinitionLogMapper.queryByDefinitionCodeAndVersion(
processInstance.getProcessDefinitionCode(),
processInstance.getProcessDefinitionVersion()
)).thenReturn(new ProcessDefinitionLog());
when(processInstanceMapper.queryDetailById(1)).thenReturn(processInstance);
when(taskInstanceMapper.queryByInstanceIdAndName(Mockito.anyInt(), Mockito.any())).thenReturn(taskInstance);
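// Build a simple seven-node DAG so viewGantt has a graph to lay out.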
DAG<String, TaskNode, TaskNodeRelation> graph = new DAG<>();
for (int i = 1; i <= 7; ++i) {
graph.addNode(i + "", new TaskNode());
}
when(processService.genDagGraph(Mockito.any(ProcessDefinition.class)))
.thenReturn(graph);
Map<String, Object> successRes = processInstanceService.viewGantt(0L, 1);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
/**
* get Mock Admin User
*
* @return admin user
*/
private User getAdminUser() {
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserName("admin");
loginUser.setUserType(UserType.GENERAL_USER);
return loginUser;
}
/**
* get mock Project
*
* @param projectCode projectCode
* @return Project
*/
private Project getProject(long projectCode) {
Project project = new Project();
project.setCode(projectCode);
project.setId(1);
project.setName("project_test1");
project.setUserId(1);
return project;
}
/**
* get Mock process instance
*
* @return process instance
*/
private ProcessInstance getProcessInstance() {
ProcessInstance processInstance = new ProcessInstance();
processInstance.setId(1);
processInstance.setName("test_process_instance");
processInstance.setProcessDefinitionCode(46L);
processInstance.setProcessDefinitionVersion(1);
processInstance.setStartTime(new Date());
processInstance.setEndTime(new Date());
return processInstance;
}
/**
* get mock processDefinition
*
* @return ProcessDefinition
*/
private ProcessDefinition getProcessDefinition() {
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setCode(46L);
processDefinition.setVersion(1);
processDefinition.setId(46);
processDefinition.setName("test_pdf");
processDefinition.setProjectCode(2L);
processDefinition.setTenantId(1);
processDefinition.setDescription("");
return processDefinition;
}
private Tenant getTenant() {
Tenant tenant = new Tenant();
tenant.setId(1);
tenant.setTenantCode("root");
return tenant;
}
/**
* get Mock worker group
*
* @return worker group
*/
private WorkerGroup getWorkGroup() {
WorkerGroup workerGroup = new WorkerGroup();
workerGroup.setName("test_workergroup");
return workerGroup;
}
/**
* get Mock task instance
*
* @return task instance
*/
private TaskInstance getTaskInstance() {
TaskInstance taskInstance = new TaskInstance();
taskInstance.setId(1);
taskInstance.setName("test_task_instance");
taskInstance.setStartTime(new Date());
taskInstance.setEndTime(new Date());
taskInstance.setExecutorId(-1);
return taskInstance;
}
private void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
result.put(Constants.STATUS, status);
if (statusParams != null && statusParams.length > 0) {
result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams));
} else {
result.put(Constants.MSG, status.getMsg());
}
}
} |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,348 | [Feature][Python] Add workflow as code task type MR | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add Python API task type MR (a sub-task of #6407). We should cover all parameters from the UI side and make them suitable for Python.
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7348 | https://github.com/apache/dolphinscheduler/pull/8140 | df26be89ed06928f9c4a446e3c1ebc976d562cef | 7aeac75860d5d80c4da0e40b26723acb7cd02f81 | "2021-12-12T13:16:08Z" | java | "2022-01-21T01:54:36Z" | dolphinscheduler-python/pydolphinscheduler/examples/task_map_reduce_example.py | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,348 | [Feature][Python] Add workflow as code task type MR | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add Python API task type MR (a sub-task of #6407). We should cover all parameters from the UI side and make them suitable for Python.
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7348 | https://github.com/apache/dolphinscheduler/pull/8140 | df26be89ed06928f9c4a446e3c1ebc976d562cef | 7aeac75860d5d80c4da0e40b26723acb7cd02f81 | "2021-12-12T13:16:08Z" | java | "2022-01-21T01:54:36Z" | dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/constants.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Constants for pydolphinscheduler."""
class ProcessDefinitionReleaseState:
"""Constants for :class:`pydolphinscheduler.core.process_definition.ProcessDefinition` release state."""
ONLINE: str = "ONLINE"
OFFLINE: str = "OFFLINE"
class ProcessDefinitionDefault:
"""Constants default value for :class:`pydolphinscheduler.core.process_definition.ProcessDefinition`."""
PROJECT: str = "project-pydolphin"
TENANT: str = "tenant_pydolphin"
USER: str = "userPythonGateway"
# TODO simple set password same as username
USER_PWD: str = "userPythonGateway"
USER_EMAIL: str = "userPythonGateway@dolphinscheduler.com"
USER_PHONE: str = "11111111111"
USER_STATE: int = 1
QUEUE: str = "queuePythonGateway"
WORKER_GROUP: str = "default"
TIME_ZONE: str = "Asia/Shanghai"
class TaskPriority(str):
"""Constants for task priority."""
HIGHEST = "HIGHEST"
HIGH = "HIGH"
MEDIUM = "MEDIUM"
LOW = "LOW"
LOWEST = "LOWEST"
class TaskFlag(str):
"""Constants for task flag."""
YES = "YES"
NO = "NO"
class TaskTimeoutFlag(str):
"""Constants for task timeout flag."""
CLOSE = "CLOSE"
class TaskType(str):
"""Constants for task type, it will also show you which kind we support up to now."""
SHELL = "SHELL"
HTTP = "HTTP"
PYTHON = "PYTHON"
SQL = "SQL"
SUB_PROCESS = "SUB_PROCESS"
PROCEDURE = "PROCEDURE"
DATAX = "DATAX"
DEPENDENT = "DEPENDENT"
CONDITIONS = "CONDITIONS"
SWITCH = "SWITCH"
FLINK = "FLINK"
SPARK = "SPARK"
class DefaultTaskCodeNum(str):
"""Constants and default value for default task code number."""
DEFAULT = 1
class JavaGatewayDefault(str):
"""Constants and default value for java gateway."""
RESULT_MESSAGE_KEYWORD = "msg"
RESULT_MESSAGE_SUCCESS = "success"
RESULT_STATUS_KEYWORD = "status"
RESULT_STATUS_SUCCESS = "SUCCESS"
RESULT_DATA = "data"
class Delimiter(str):
"""Constants for delimiter."""
BAR = "-"
DASH = "/"
COLON = ":"
UNDERSCORE = "_"
DIRECTION = "->"
class Time(str):
"""Constants for date."""
FMT_STD_DATE = "%Y-%m-%d"
LEN_STD_DATE = 10
FMT_DASH_DATE = "%Y/%m/%d"
FMT_SHORT_DATE = "%Y%m%d"
LEN_SHORT_DATE = 8
FMT_STD_TIME = "%H:%M:%S"
FMT_NO_COLON_TIME = "%H%M%S"
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,348 | [Feature][Python] Add workflow as code task type MR | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add Python API task type MR (a sub-task of #6407). We should cover all parameters from the UI side and make them suitable for Python.
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7348 | https://github.com/apache/dolphinscheduler/pull/8140 | df26be89ed06928f9c4a446e3c1ebc976d562cef | 7aeac75860d5d80c4da0e40b26723acb7cd02f81 | "2021-12-12T13:16:08Z" | java | "2022-01-21T01:54:36Z" | dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/tasks/map_reduce.py | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,348 | [Feature][Python] Add workflow as code task type MR | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add Python API task type MR (a sub-task of #6407). We should cover all parameters from the UI side and make them suitable for Python.
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7348 | https://github.com/apache/dolphinscheduler/pull/8140 | df26be89ed06928f9c4a446e3c1ebc976d562cef | 7aeac75860d5d80c4da0e40b26723acb7cd02f81 | "2021-12-12T13:16:08Z" | java | "2022-01-21T01:54:36Z" | dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_map_reduce.py | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,138 | [Feature][Task] add at@ msg in the dingtalk task plugin | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add @-mention ("at") messages to the DingTalk plugin.
When a custom bot sends a message, you can specify the people to @ by their mobile phone numbers. Everyone on the @ list receives an @-mention reminder when the message arrives; even do-not-disturb conversations still show the notification, and "someone @ you" appears on the collapsed chat.
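For reference, here is a minimal sketch of the kind of payload this implies, assuming the public DingTalk custom-robot webhook schema (the `msgtype`/`text`/`at` field names come from DingTalk's documented API, not from this plugin's code):

```java
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

// Minimal sketch (not the plugin's implementation): build a text message that
// @-mentions specific users by mobile number via the custom-robot webhook schema.
public class DingTalkAtPayloadSketch {
    public static Map<String, Object> textMessageWithAt(String content, String... atMobiles) {
        Map<String, Object> text = new HashMap<>();
        text.put("content", content);

        Map<String, Object> at = new HashMap<>();
        at.put("atMobiles", Arrays.asList(atMobiles)); // phone numbers to @
        at.put("isAtAll", false);                      // true would @ everyone

        Map<String, Object> msg = new HashMap<>();
        msg.put("msgtype", "text");
        msg.put("text", text);
        msg.put("at", at);
        return msg; // serialize to JSON and POST to the robot's webhook URL
    }
}
```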
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8138 | https://github.com/apache/dolphinscheduler/pull/8139 | e0dbf3edc6200ed2abd1dffbcfaff8b53d7996f4 | 1fd748f376ad652783c94fd7c2d9e602291acb16 | "2022-01-20T10:53:08Z" | java | "2022-01-21T09:09:39Z" | dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.alert.dingtalk;
import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_FALSE;
import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_NO;
import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_TRUE;
import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_YES;
import org.apache.dolphinscheduler.alert.api.AlertChannel;
import org.apache.dolphinscheduler.alert.api.AlertChannelFactory;
import org.apache.dolphinscheduler.spi.params.PasswordParam;
import org.apache.dolphinscheduler.spi.params.base.ParamsOptions;
import org.apache.dolphinscheduler.spi.params.base.PluginParams;
import org.apache.dolphinscheduler.spi.params.base.Validate;
import org.apache.dolphinscheduler.spi.params.input.InputParam;
import org.apache.dolphinscheduler.spi.params.radio.RadioParam;
import java.util.Arrays;
import java.util.List;
import com.google.auto.service.AutoService;
@AutoService(AlertChannelFactory.class)
public final class DingTalkAlertChannelFactory implements AlertChannelFactory {
@Override
public String name() {
return "DingTalk";
}
@Override
public List<PluginParams> params() {
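// Each PluginParams built below becomes one form field on the alert-instance page; Validate.setRequired marks which inputs are mandatory.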
InputParam webHookParam = InputParam
.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_WEB_HOOK, DingTalkParamsConstants.DING_TALK_WEB_HOOK)
.addValidate(Validate.newBuilder()
.setRequired(true)
.build())
.build();
InputParam keywordParam = InputParam
.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_KEYWORD, DingTalkParamsConstants.DING_TALK_KEYWORD)
.addValidate(Validate.newBuilder()
.setRequired(false)
.build())
.build();
InputParam secretParam = InputParam
.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_SECRET, DingTalkParamsConstants.DING_TALK_SECRET)
.addValidate(Validate.newBuilder()
.setRequired(false)
.build())
.build();
RadioParam isEnableProxy = RadioParam
.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_PROXY_ENABLE, DingTalkParamsConstants.DING_TALK_PROXY_ENABLE)
.addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false))
.addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false))
.setValue(STRING_FALSE)
.addValidate(Validate.newBuilder()
.setRequired(false)
.build())
.build();
InputParam proxyParam = InputParam
.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_PROXY, DingTalkParamsConstants.DING_TALK_PROXY)
.addValidate(Validate.newBuilder()
.setRequired(false)
.build())
.build();
InputParam portParam = InputParam
.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_PORT, DingTalkParamsConstants.DING_TALK_PORT)
.addValidate(Validate.newBuilder()
.setRequired(false)
.build())
.build();
InputParam userParam = InputParam
.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_USER, DingTalkParamsConstants.DING_TALK_USER)
.addValidate(Validate.newBuilder()
.setRequired(false)
.build())
.build();
PasswordParam passwordParam = PasswordParam
.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_PASSWORD, DingTalkParamsConstants.DING_TALK_PASSWORD)
.setPlaceholder("if authentication is enabled, you need to input the password")
.build();
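        // Sketch for the @-mention request in issue #8138 (hypothetical names,
        // not part of the current plugin): an extra input collecting the mobile
        // numbers to @ could be declared like the params above, e.g.
        //   InputParam atMobilesParam = InputParam
        //           .newBuilder("AtMobiles", "$t('atMobiles')")
        //           .addValidate(Validate.newBuilder().setRequired(false).build())
        //           .build();
        // and appended to the list returned below; the factory test's expected
        // param count (currently 8) would grow accordingly.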
return Arrays.asList(webHookParam, keywordParam, secretParam, isEnableProxy, proxyParam, portParam, userParam, passwordParam);
}
@Override
public AlertChannel create() {
return new DingTalkAlertChannel();
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,138 | [Feature][Task] add at@ msg in the dingtalk task plugin | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add @ (at) message support to the DingTalk plugin.
When a custom bot sends a message, you can specify the list of people to @-mention by their mobile phone numbers. Everyone on that list receives an @ reminder when the message arrives; even Do-Not-Disturb conversations still get a notification, and "someone @ you" shows up on the folded chat.
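For illustration, here is a minimal sketch — under the assumption that the feature builds the same kind of payload map the plugin already uses — of a text message that @-mentions people. The JSON field names (`msgtype`, `text`, `at`, `atMobiles`, `isAtAll`) come from DingTalk's custom-robot docs; the class name, helper name, and phone numbers are hypothetical placeholders, not part of the plugin:

```java
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

import org.apache.dolphinscheduler.spi.utils.JSONUtils;

public class DingTalkAtSketch {

    // Builds a "text" message body that @-mentions the given (placeholder) numbers.
    public static String textWithAt(String content) {
        Map<String, Object> items = new HashMap<>();
        items.put("msgtype", "text");

        Map<String, String> text = new HashMap<>();
        // Per the docs, the mention is only highlighted when the number
        // also appears inside the message content itself.
        text.put("content", content + " @15800000001 @15800000002");
        items.put("text", text);

        Map<String, Object> at = new HashMap<>();
        at.put("atMobiles", Arrays.asList("15800000001", "15800000002")); // hypothetical numbers
        at.put("isAtAll", false); // set true to @ everyone instead
        items.put("at", at);

        return JSONUtils.toJsonString(items);
    }
}
```

Only group members can be @-ed, and `isAtAll` pings the whole group, so the plugin would likely expose the mobile-number list (and possibly an "at all" switch) as new alert-instance parameters.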
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8138 | https://github.com/apache/dolphinscheduler/pull/8139 | e0dbf3edc6200ed2abd1dffbcfaff8b53d7996f4 | 1fd748f376ad652783c94fd7c2d9e602291acb16 | "2022-01-20T10:53:08Z" | java | "2022-01-21T09:09:39Z" | dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkParamsConstants.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.alert.dingtalk;
public final class DingTalkParamsConstants {
static final String DING_TALK_PROXY_ENABLE = "$t('isEnableProxy')";
static final String NAME_DING_TALK_PROXY_ENABLE = "IsEnableProxy";
static final String DING_TALK_WEB_HOOK = "$t('webhook')";
static final String NAME_DING_TALK_WEB_HOOK = "WebHook";
static final String DING_TALK_KEYWORD = "$t('keyword')";
static final String NAME_DING_TALK_KEYWORD = "Keyword";
static final String DING_TALK_SECRET = "$t('secret')";
static final String NAME_DING_TALK_SECRET = "Secret";
static final String DING_TALK_PROXY = "$t('proxy')";
static final String NAME_DING_TALK_PROXY = "Proxy";
static final String DING_TALK_PORT = "$t('port')";
static final String NAME_DING_TALK_PORT = "Port";
static final String DING_TALK_USER = "$t('user')";
static final String NAME_DING_TALK_USER = "User";
static final String DING_TALK_PASSWORD = "$t('password')";
static final String NAME_DING_TALK_PASSWORD = "Password";
private DingTalkParamsConstants() {
throw new UnsupportedOperationException("This is a utility class and cannot be instantiated");
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,138 | [Feature][Task] add at@ msg in the dingtalk task plugin | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add @ (at) message support to the DingTalk plugin.
When a custom bot sends a message, you can specify the list of people to @-mention by their mobile phone numbers. Everyone on that list receives an @ reminder when the message arrives; even Do-Not-Disturb conversations still get a notification, and "someone @ you" shows up on the folded chat.
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8138 | https://github.com/apache/dolphinscheduler/pull/8139 | e0dbf3edc6200ed2abd1dffbcfaff8b53d7996f4 | 1fd748f376ad652783c94fd7c2d9e602291acb16 | "2022-01-20T10:53:08Z" | java | "2022-01-21T09:09:39Z" | dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSender.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.alert.dingtalk;
import org.apache.dolphinscheduler.alert.api.AlertResult;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.binary.StringUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import java.io.IOException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <p>
* https://open.dingtalk.com/document/robots/custom-robot-access
* https://open.dingtalk.com/document/robots/customize-robot-security-settings
* </p>
*/
public final class DingTalkSender {
private static final Logger logger = LoggerFactory.getLogger(DingTalkSender.class);
private final String url;
private final String keyword;
private final String secret;
private final Boolean enableProxy;
private String proxy;
private Integer port;
private String user;
private String password;
DingTalkSender(Map<String, String> config) {
url = config.get(DingTalkParamsConstants.NAME_DING_TALK_WEB_HOOK);
keyword = config.get(DingTalkParamsConstants.NAME_DING_TALK_KEYWORD);
secret = config.get(DingTalkParamsConstants.NAME_DING_TALK_SECRET);
enableProxy = Boolean.valueOf(config.get(DingTalkParamsConstants.NAME_DING_TALK_PROXY_ENABLE));
if (Boolean.TRUE.equals(enableProxy)) {
port = Integer.parseInt(config.get(DingTalkParamsConstants.NAME_DING_TALK_PORT));
proxy = config.get(DingTalkParamsConstants.NAME_DING_TALK_PROXY);
user = config.get(DingTalkParamsConstants.NAME_DING_TALK_USER);
password = config.get(DingTalkParamsConstants.NAME_DING_TALK_PASSWORD);
}
}
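    // Note: the config map passed in above is keyed by the NAME_* constants
    // from DingTalkParamsConstants (the field names the UI submits), not by
    // the "$t(...)" label constants.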
private static HttpPost constructHttpPost(String url, String msg) {
HttpPost post = new HttpPost(url);
StringEntity entity = new StringEntity(msg, StandardCharsets.UTF_8);
post.setEntity(entity);
post.addHeader("Content-Type", "application/json; charset=utf-8");
return post;
}
private static CloseableHttpClient getProxyClient(String proxy, int port, String user, String password) {
HttpHost httpProxy = new HttpHost(proxy, port);
CredentialsProvider provider = new BasicCredentialsProvider();
provider.setCredentials(new AuthScope(httpProxy), new UsernamePasswordCredentials(user, password));
return HttpClients.custom().setDefaultCredentialsProvider(provider).build();
}
private static CloseableHttpClient getDefaultClient() {
return HttpClients.createDefault();
}
private static RequestConfig getProxyConfig(String proxy, int port) {
HttpHost httpProxy = new HttpHost(proxy, port);
return RequestConfig.custom().setProxy(httpProxy).build();
}
private static String textToJsonString(String text) {
Map<String, Object> items = new HashMap<>();
items.put("msgtype", "text");
Map<String, String> textContent = new HashMap<>();
byte[] byt = StringUtils.getBytesUtf8(text);
String txt = StringUtils.newStringUtf8(byt);
textContent.put("content", txt);
items.put("text", textContent);
return JSONUtils.toJsonString(items);
}
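    // The @-mention feature from issue #8138 would extend this payload with an
    // "at" object alongside "msgtype" and "text" — per DingTalk's docs something
    // like {"atMobiles": ["..."], "isAtAll": false}.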
private static AlertResult checkSendDingTalkSendMsgResult(String result) {
AlertResult alertResult = new AlertResult();
alertResult.setStatus("false");
if (null == result) {
alertResult.setMessage("send ding talk msg error");
logger.info("send ding talk msg error,ding talk server resp is null");
return alertResult;
}
DingTalkSendMsgResponse sendMsgResponse = JSONUtils.parseObject(result, DingTalkSendMsgResponse.class);
if (null == sendMsgResponse) {
alertResult.setMessage("send ding talk msg fail");
logger.info("send ding talk msg error,resp error");
return alertResult;
}
if (sendMsgResponse.errcode != null && sendMsgResponse.errcode == 0) {
alertResult.setStatus("true");
alertResult.setMessage("send ding talk msg success");
return alertResult;
}
alertResult.setMessage(String.format("alert send ding talk msg error : %s", sendMsgResponse.getErrmsg()));
logger.info("alert send ding talk msg error : {}", sendMsgResponse.getErrmsg());
return alertResult;
}
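    // For reference, a successful webhook call answers with a body like
    // {"errcode":0,"errmsg":"ok"}; any other errcode is mapped to a failed
    // AlertResult above.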
public AlertResult sendDingTalkMsg(String title, String content) {
AlertResult alertResult;
try {
String resp = sendMsg(title, content);
return checkSendDingTalkSendMsgResult(resp);
} catch (Exception e) {
logger.info("send ding talk alert msg exception : {}", e.getMessage());
alertResult = new AlertResult();
alertResult.setStatus("false");
alertResult.setMessage("send ding talk alert fail.");
}
return alertResult;
}
private String sendMsg(String title, String content) throws IOException {
StringBuilder text = new StringBuilder();
if (org.apache.dolphinscheduler.spi.utils.StringUtils.isNotBlank(keyword)) {
text.append(keyword);
text.append(":");
}
text.append(title);
text.append("\n");
text.append(content);
String msgToJson = textToJsonString(text.toString());
HttpPost httpPost = constructHttpPost(org.apache.dolphinscheduler.spi.utils.StringUtils.isBlank(secret) ? url : generateSignedUrl(), msgToJson);
CloseableHttpClient httpClient;
if (Boolean.TRUE.equals(enableProxy)) {
httpClient = getProxyClient(proxy, port, user, password);
RequestConfig rcf = getProxyConfig(proxy, port);
httpPost.setConfig(rcf);
} else {
httpClient = getDefaultClient();
}
try {
CloseableHttpResponse response = httpClient.execute(httpPost);
String resp;
try {
HttpEntity entity = response.getEntity();
resp = EntityUtils.toString(entity, "UTF-8");
EntityUtils.consume(entity);
} finally {
response.close();
}
logger.info("Ding Talk send title :{},content : {}, resp: {}", title, content, resp);
return resp;
} finally {
httpClient.close();
}
}
private String generateSignedUrl() {
Long timestamp = System.currentTimeMillis();
String stringToSign = timestamp + "\n" + secret;
String sign = org.apache.dolphinscheduler.spi.utils.StringUtils.EMPTY;
try {
Mac mac = Mac.getInstance("HmacSHA256");
mac.init(new SecretKeySpec(secret.getBytes(StandardCharsets.UTF_8), "HmacSHA256"));
byte[] signData = mac.doFinal(stringToSign.getBytes(StandardCharsets.UTF_8));
sign = URLEncoder.encode(new String(Base64.encodeBase64(signData)), "UTF-8");
} catch (Exception e) {
logger.error("generate sign error, message:{}", e);
}
return url + "×tamp=" + timestamp + "&sign=" + sign;
}
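    // Signing scheme per DingTalk's security-settings docs: the robot secret is
    // both the HMAC-SHA256 key and the tail of the signed string
    // (timestamp + "\n" + secret); the Base64 digest is URL-encoded and appended
    // to the webhook URL together with the same timestamp.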
static final class DingTalkSendMsgResponse {
private Integer errcode;
private String errmsg;
public DingTalkSendMsgResponse() {
}
public Integer getErrcode() {
return this.errcode;
}
public void setErrcode(Integer errcode) {
this.errcode = errcode;
}
public String getErrmsg() {
return this.errmsg;
}
public void setErrmsg(String errmsg) {
this.errmsg = errmsg;
}
public boolean equals(final Object o) {
if (o == this) {
return true;
}
if (!(o instanceof DingTalkSendMsgResponse)) {
return false;
}
final DingTalkSendMsgResponse other = (DingTalkSendMsgResponse) o;
final Object this$errcode = this.getErrcode();
final Object other$errcode = other.getErrcode();
if (this$errcode == null ? other$errcode != null : !this$errcode.equals(other$errcode)) {
return false;
}
final Object this$errmsg = this.getErrmsg();
final Object other$errmsg = other.getErrmsg();
if (this$errmsg == null ? other$errmsg != null : !this$errmsg.equals(other$errmsg)) {
return false;
}
return true;
}
public int hashCode() {
final int PRIME = 59;
int result = 1;
final Object $errcode = this.getErrcode();
result = result * PRIME + ($errcode == null ? 43 : $errcode.hashCode());
final Object $errmsg = this.getErrmsg();
result = result * PRIME + ($errmsg == null ? 43 : $errmsg.hashCode());
return result;
}
public String toString() {
return "DingTalkSender.DingTalkSendMsgResponse(errcode=" + this.getErrcode() + ", errmsg=" + this.getErrmsg() + ")";
}
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,138 | [Feature][Task] add at@ msg in the dingtalk task plugin | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add @ (at) message support to the DingTalk plugin.
When a custom bot sends a message, you can specify the list of people to @-mention by their mobile phone numbers. Everyone on that list receives an @ reminder when the message arrives; even Do-Not-Disturb conversations still get a notification, and "someone @ you" shows up on the folded chat.
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8138 | https://github.com/apache/dolphinscheduler/pull/8139 | e0dbf3edc6200ed2abd1dffbcfaff8b53d7996f4 | 1fd748f376ad652783c94fd7c2d9e602291acb16 | "2022-01-20T10:53:08Z" | java | "2022-01-21T09:09:39Z" | dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/test/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.alert.dingtalk;
import org.apache.dolphinscheduler.alert.api.AlertChannel;
import org.apache.dolphinscheduler.spi.params.base.PluginParams;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
public class DingTalkAlertChannelFactoryTest {
@Test
public void testGetParams() {
DingTalkAlertChannelFactory dingTalkAlertChannelFactory = new DingTalkAlertChannelFactory();
List<PluginParams> params = dingTalkAlertChannelFactory.params();
JSONUtils.toJsonString(params);
Assert.assertEquals(8, params.size());
}
@Test
public void testCreate() {
DingTalkAlertChannelFactory dingTalkAlertChannelFactory = new DingTalkAlertChannelFactory();
AlertChannel alertChannel = dingTalkAlertChannelFactory.create();
Assert.assertNotNull(alertChannel);
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,138 | [Feature][Task] add at@ msg in the dingtalk task plugin | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add @ (at) message support to the DingTalk plugin.
When a custom bot sends a message, you can specify the list of people to @-mention by their mobile phone numbers. Everyone on that list receives an @ reminder when the message arrives; even Do-Not-Disturb conversations still get a notification, and "someone @ you" shows up on the folded chat.
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8138 | https://github.com/apache/dolphinscheduler/pull/8139 | e0dbf3edc6200ed2abd1dffbcfaff8b53d7996f4 | 1fd748f376ad652783c94fd7c2d9e602291acb16 | "2022-01-20T10:53:08Z" | java | "2022-01-21T09:09:39Z" | dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export default {
'User Name': 'User Name',
'Please enter user name': 'Please enter user name',
Password: 'Password',
'Please enter your password': 'Please enter your password',
'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22',
Login: 'Login',
Home: 'Home',
'Failed to create node to save': 'Failed to create node to save',
'Global parameters': 'Global parameters',
'Local parameters': 'Local parameters',
'Copy success': 'Copy success',
'The browser does not support automatic copying': 'The browser does not support automatic copying',
'Whether to save the DAG graph': 'Whether to save the DAG graph',
'Current node settings': 'Current node settings',
'View history': 'View history',
'View log': 'View log',
'Force success': 'Force success',
'Enter this child node': 'Enter this child node',
'Node name': 'Node name',
'Please enter name (required)': 'Please enter name (required)',
'Run flag': 'Run flag',
Normal: 'Normal',
'Prohibition execution': 'Prohibition execution',
'Please enter description': 'Please enter description',
'Number of failed retries': 'Number of failed retries',
Times: 'Times',
'Failed retry interval': 'Failed retry interval',
Minute: 'Minute',
'Delay execution time': 'Delay execution time',
'Delay execution': 'Delay execution',
'Forced success': 'Forced success',
Cancel: 'Cancel',
'Confirm add': 'Confirm add',
'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process',
'The task has not been executed and cannot enter the sub-Process': 'The task has not been executed and cannot enter the sub-Process',
'Name already exists': 'Name already exists',
'Download Log': 'Download Log',
'Refresh Log': 'Refresh Log',
'Enter full screen': 'Enter full screen',
'Cancel full screen': 'Cancel full screen',
Close: 'Close',
'Update log success': 'Update log success',
'No more logs': 'No more logs',
'No log': 'No log',
'Loading Log...': 'Loading Log...',
'Set the DAG diagram name': 'Set the DAG diagram name',
'Please enter description(optional)': 'Please enter description(optional)',
'Set global': 'Set global',
'Whether to go online the process definition': 'Whether to go online the process definition',
'Whether to update the process definition': 'Whether to update the process definition',
Add: 'Add',
'DAG graph name cannot be empty': 'DAG graph name cannot be empty',
'Create Datasource': 'Create Datasource',
'Project Home': 'Workflow Monitor',
'Project Manage': 'Project',
'Create Project': 'Create Project',
'Cron Manage': 'Cron Manage',
'Copy Workflow': 'Copy Workflow',
'Tenant Manage': 'Tenant Manage',
'Create Tenant': 'Create Tenant',
'User Manage': 'User Manage',
'Create User': 'Create User',
'User Information': 'User Information',
'Edit Password': 'Edit Password',
Success: 'Success',
Failed: 'Failed',
Delete: 'Delete',
'Please choose': 'Please choose',
'Please enter a positive integer': 'Please enter a positive integer',
'Program Type': 'Program Type',
'Main Class': 'Main Class',
'Main Package': 'Main Package',
'Please enter main package': 'Please enter main package',
'Please enter main class': 'Please enter main class',
'Main Arguments': 'Main Arguments',
'Please enter main arguments': 'Please enter main arguments',
'Option Parameters': 'Option Parameters',
'Please enter option parameters': 'Please enter option parameters',
Resources: 'Resources',
'Custom Parameters': 'Custom Parameters',
'Custom template': 'Custom template',
Datasource: 'Datasource',
methods: 'methods',
'Please enter the procedure method': 'Please enter the procedure script \n\ncall procedure:{call <procedure-name>[(<arg1>,<arg2>, ...)]}\n\ncall function:{?= call <procedure-name>[(<arg1>,<arg2>, ...)]} ',
'The procedure method script example': 'example:{call <procedure-name>[(?,?, ...)]} or {?= call <procedure-name>[(?,?, ...)]}',
Script: 'Script',
'Please enter script(required)': 'Please enter script(required)',
'Deploy Mode': 'Deploy Mode',
'Driver Cores': 'Driver Cores',
'Please enter Driver cores': 'Please enter Driver cores',
'Driver Memory': 'Driver Memory',
'Please enter Driver memory': 'Please enter Driver memory',
'Executor Number': 'Executor Number',
'Please enter Executor number': 'Please enter Executor number',
'The Executor number should be a positive integer': 'The Executor number should be a positive integer',
'Executor Memory': 'Executor Memory',
'Please enter Executor memory': 'Please enter Executor memory',
'Executor Cores': 'Executor Cores',
'Please enter Executor cores': 'Please enter Executor cores',
'Memory should be a positive integer': 'Memory should be a positive integer',
'Core number should be positive integer': 'Core number should be positive integer',
'Flink Version': 'Flink Version',
'JobManager Memory': 'JobManager Memory',
'Please enter JobManager memory': 'Please enter JobManager memory',
'TaskManager Memory': 'TaskManager Memory',
'Please enter TaskManager memory': 'Please enter TaskManager memory',
'Slot Number': 'Slot Number',
'Please enter Slot number': 'Please enter Slot number',
Parallelism: 'Parallelism',
'Custom Parallelism': 'Configure parallelism',
'Please enter Parallelism': 'Please enter Parallelism',
'Parallelism tip': 'If there are a large number of tasks requiring complement, you can use the custom parallelism to ' +
'set the number of complement task threads to a reasonable value and avoid putting too heavy a load on the server.',
'Parallelism number should be positive integer': 'Parallelism number should be positive integer',
'TaskManager Number': 'TaskManager Number',
'Please enter TaskManager number': 'Please enter TaskManager number',
'App Name': 'App Name',
'Please enter app name(optional)': 'Please enter app name(optional)',
'SQL Type': 'SQL Type',
'Send Email': 'Send Email',
'Log display': 'Log display',
'rows of result': 'rows of result',
Title: 'Title',
'Please enter the title of email': 'Please enter the title of email',
Table: 'Table',
TableMode: 'Table',
Attachment: 'Attachment',
'SQL Parameter': 'SQL Parameter',
'SQL Statement': 'SQL Statement',
'UDF Function': 'UDF Function',
'Please enter a SQL Statement(required)': 'Please enter a SQL Statement(required)',
'Please enter a JSON Statement(required)': 'Please enter a JSON Statement(required)',
'One form or attachment must be selected': 'One form or attachment must be selected',
'Mail subject required': 'Mail subject required',
'Child Node': 'Child Node',
'Please select a sub-Process': 'Please select a sub-Process',
Edit: 'Edit',
'Switch To This Version': 'Switch To This Version',
'Datasource Name': 'Datasource Name',
'Please enter datasource name': 'Please enter datasource name',
IP: 'IP',
'Please enter IP': 'Please enter IP',
Port: 'Port',
'Please enter port': 'Please enter port',
'Database Name': 'Database Name',
'Please enter database name': 'Please enter database name',
'Oracle Connect Type': 'ServiceName or SID',
'Oracle Service Name': 'ServiceName',
'Oracle SID': 'SID',
'jdbc connect parameters': 'jdbc connect parameters',
'Test Connect': 'Test Connect',
'Please enter resource name': 'Please enter resource name',
'Please enter resource folder name': 'Please enter resource folder name',
'Please enter a non-query SQL statement': 'Please enter a non-query SQL statement',
'Please enter IP/hostname': 'Please enter IP/hostname',
'jdbc connection parameters is not a correct JSON format': 'jdbc connection parameters is not a correct JSON format',
'#': '#',
'Datasource Type': 'Datasource Type',
'Datasource Parameter': 'Datasource Parameter',
'Create Time': 'Create Time',
'Update Time': 'Update Time',
Operation: 'Operation',
'Current Version': 'Current Version',
'Click to view': 'Click to view',
'Delete?': 'Delete?',
'Switch Version Successfully': 'Switch Version Successfully',
'Confirm Switch To This Version?': 'Confirm Switch To This Version?',
Confirm: 'Confirm',
'Task status statistics': 'Task Status Statistics',
Number: 'Number',
State: 'State',
'Dry-run flag': 'Dry-run flag',
'Process Status Statistics': 'Process Status Statistics',
'Process Definition Statistics': 'Process Definition Statistics',
'Project Name': 'Project Name',
'Please enter name': 'Please enter name',
'Owned Users': 'Owned Users',
'Process Pid': 'Process Pid',
'Zk registration directory': 'Zk registration directory',
cpuUsage: 'cpuUsage',
memoryUsage: 'memoryUsage',
'Last heartbeat time': 'Last heartbeat time',
'Edit Tenant': 'Edit Tenant',
'OS Tenant Code': 'OS Tenant Code',
'Tenant Name': 'Tenant Name',
Queue: 'Yarn Queue',
'Please select a queue': 'default is tenant association queue',
'Please enter the os tenant code in English': 'Please enter the os tenant code in English',
'Please enter os tenant code in English': 'Please enter os tenant code in English',
'Please enter os tenant code': 'Please enter os tenant code',
'Please enter tenant Name': 'Please enter tenant Name',
'The os tenant code. Only letters or a combination of letters and numbers are allowed': 'The os tenant code. Only letters or a combination of letters and numbers are allowed',
'Edit User': 'Edit User',
Tenant: 'Tenant',
Email: 'Email',
Phone: 'Phone',
'User Type': 'User Type',
'Please enter phone number': 'Please enter phone number',
'Please enter email': 'Please enter email',
'Please enter the correct email format': 'Please enter the correct email format',
'Please enter the correct mobile phone format': 'Please enter the correct mobile phone format',
Project: 'Project',
Authorize: 'Authorize',
'File resources': 'File resources',
'UDF resources': 'UDF resources',
'UDF resources directory': 'UDF resources directory',
'Please select UDF resources directory': 'Please select UDF resources directory',
'Alarm group': 'Alarm group',
'Alarm group required': 'Alarm group required',
'Edit alarm group': 'Edit alarm group',
'Create alarm group': 'Create alarm group',
'Create Alarm Instance': 'Create Alarm Instance',
'Edit Alarm Instance': 'Edit Alarm Instance',
'Group Name': 'Group Name',
'Alarm instance name': 'Alarm instance name',
'Alarm plugin name': 'Alarm plugin name',
'Select plugin': 'Select plugin',
'Select Alarm plugin': 'Please select an Alarm plugin',
'Please enter group name': 'Please enter group name',
'Instance parameter exception': 'Instance parameter exception',
'Group Type': 'Group Type',
'Alarm plugin instance': 'Alarm plugin instance',
'Please enter alarm plugin instance name': 'Please enter alarm plugin instance name',
'Select Alarm plugin instance': 'Please select an Alarm plugin instance',
Remarks: 'Remarks',
SMS: 'SMS',
'Managing Users': 'Managing Users',
Permission: 'Permission',
Administrator: 'Administrator',
'Confirm Password': 'Confirm Password',
'Please enter confirm password': 'Please enter confirm password',
'Password cannot be in Chinese': 'Password cannot be in Chinese',
'Please enter a password (6-22) character password': 'Please enter a password (6-22) character password',
'Confirmation password cannot be in Chinese': 'Confirmation password cannot be in Chinese',
'Please enter a confirmation password (6-22) character password': 'Please enter a confirmation password (6-22) character password',
'The password is inconsistent with the confirmation password': 'The password is inconsistent with the confirmation password',
'Please select the datasource': 'Please select the datasource',
'Please select resources': 'Please select resources',
Query: 'Query',
'Non Query': 'Non Query',
'prop(required)': 'prop(required)',
'value(optional)': 'value(optional)',
'value(required)': 'value(required)',
'prop is empty': 'prop is empty',
'value is empty': 'value is empty',
'prop is repeat': 'prop is repeat',
'Start Time': 'Start Time',
'End Time': 'End Time',
crontab: 'crontab',
'Failure Strategy': 'Failure Strategy',
online: 'online',
offline: 'offline',
'Task Status': 'Task Status',
'Process Instance': 'Process Instance',
'Task Instance': 'Task Instance',
'Select date range': 'Select date range',
startDate: 'startDate',
endDate: 'endDate',
Date: 'Date',
Waiting: 'Waiting',
Execution: 'Execution',
Finish: 'Finish',
'Create File': 'Create File',
'Create folder': 'Create folder',
'File Name': 'File Name',
'Folder Name': 'Folder Name',
'File Format': 'File Format',
'Folder Format': 'Folder Format',
'File Content': 'File Content',
'Upload File Size': 'Upload File size cannot exceed 1g',
Create: 'Create',
'Please enter the resource content': 'Please enter the resource content',
'Resource content cannot exceed 3000 lines': 'Resource content cannot exceed 3000 lines',
'File Details': 'File Details',
'Download Details': 'Download Details',
Return: 'Return',
Save: 'Save',
'File Manage': 'File Manage',
'Upload Files': 'Upload Files',
'Create UDF Function': 'Create UDF Function',
'Upload UDF Resources': 'Upload UDF Resources',
'Service-Master': 'Service-Master',
'Service-Worker': 'Service-Worker',
'Process Name': 'Process Name',
Executor: 'Executor',
'Run Type': 'Run Type',
'Scheduling Time': 'Scheduling Time',
'Run Times': 'Run Times',
host: 'host',
'fault-tolerant sign': 'fault-tolerant sign',
Rerun: 'Rerun',
'Recovery Failed': 'Recovery Failed',
Stop: 'Stop',
Pause: 'Pause',
'Recovery Suspend': 'Recovery Suspend',
Gantt: 'Gantt',
'Node Type': 'Node Type',
'Submit Time': 'Submit Time',
Duration: 'Duration',
'Retry Count': 'Retry Count',
'Task Name': 'Task Name',
'Task Date': 'Task Date',
'Source Table': 'Source Table',
'Record Number': 'Record Number',
'Target Table': 'Target Table',
'Online viewing type is not supported': 'Online viewing type is not supported',
Size: 'Size',
Rename: 'Rename',
Download: 'Download',
Export: 'Export',
'Version Info': 'Version Info',
Submit: 'Submit',
'Edit UDF Function': 'Edit UDF Function',
type: 'type',
'UDF Function Name': 'UDF Function Name',
FILE: 'FILE',
UDF: 'UDF',
'File Subdirectory': 'File Subdirectory',
'Please enter a function name': 'Please enter a function name',
'Package Name': 'Package Name',
'Please enter a Package name': 'Please enter a Package name',
Parameter: 'Parameter',
'Please enter a parameter': 'Please enter a parameter',
'UDF Resources': 'UDF Resources',
'Upload Resources': 'Upload Resources',
Instructions: 'Instructions',
'Please enter a instructions': 'Please enter a instructions',
'Please enter a UDF function name': 'Please enter a UDF function name',
'Select UDF Resources': 'Select UDF Resources',
'Class Name': 'Class Name',
'Jar Package': 'Jar Package',
'Library Name': 'Library Name',
'UDF Resource Name': 'UDF Resource Name',
'File Size': 'File Size',
Description: 'Description',
'Drag Nodes and Selected Items': 'Drag Nodes and Selected Items',
'Select Line Connection': 'Select Line Connection',
'Delete selected lines or nodes': 'Delete selected lines or nodes',
'Full Screen': 'Full Screen',
Unpublished: 'Unpublished',
'Start Process': 'Start Process',
'Execute from the current node': 'Execute from the current node',
'Recover tolerance fault process': 'Recover tolerance fault process',
'Resume the suspension process': 'Resume the suspension process',
'Execute from the failed nodes': 'Execute from the failed nodes',
'Complement Data': 'Complement Data',
'Scheduling execution': 'Scheduling execution',
'Recovery waiting thread': 'Recovery waiting thread',
'Submitted successfully': 'Submitted successfully',
Executing: 'Executing',
'Ready to pause': 'Ready to pause',
'Ready to stop': 'Ready to stop',
'Need fault tolerance': 'Need fault tolerance',
Kill: 'Kill',
'Waiting for thread': 'Waiting for thread',
'Waiting for dependence': 'Waiting for dependence',
Start: 'Start',
Copy: 'Copy',
'Copy name': 'Copy name',
'Copy path': 'Copy path',
'Please enter keyword': 'Please enter keyword',
'File Upload': 'File Upload',
'Drag the file into the current upload window': 'Drag the file into the current upload window',
'Drag area upload': 'Drag area upload',
Upload: 'Upload',
'ReUpload File': 'ReUpload File',
'Please enter file name': 'Please enter file name',
'Please select the file to upload': 'Please select the file to upload',
'Resources manage': 'Resources',
Security: 'Security',
Logout: 'Logout',
'No data': 'No data',
'Uploading...': 'Uploading...',
'Loading...': 'Loading...',
List: 'List',
'Unable to download without proper url': 'Unable to download without proper url',
Process: 'Process',
'Process definition': 'Process definition',
'Task record': 'Task record',
'Warning group manage': 'Warning group manage',
'Warning instance manage': 'Warning instance manage',
'Servers manage': 'Servers manage',
'UDF manage': 'UDF manage',
'Resource manage': 'Resource manage',
'Function manage': 'Function manage',
'Edit password': 'Edit password',
'Ordinary users': 'Ordinary users',
'Create process': 'Create process',
'Import process': 'Import process',
'Timing state': 'Timing state',
Timing: 'Timing',
Timezone: 'Timezone',
TreeView: 'TreeView',
'Mailbox already exists! Recipients and copyers cannot repeat': 'Mailbox already exists! Recipients and copyers cannot repeat',
'Mailbox input is illegal': 'Mailbox input is illegal',
'Please set the parameters before starting': 'Please set the parameters before starting',
Continue: 'Continue',
End: 'End',
'Node execution': 'Node execution',
'Backward execution': 'Backward execution',
'Forward execution': 'Forward execution',
'Execute only the current node': 'Execute only the current node',
'Notification strategy': 'Notification strategy',
'Notification group': 'Notification group',
'Please select a notification group': 'Please select a notification group',
'Whether it is a complement process?': 'Whether it is a complement process?',
'Schedule date': 'Schedule date',
'Mode of execution': 'Mode of execution',
'Serial execution': 'Serial execution',
'Parallel execution': 'Parallel execution',
'Set parameters before timing': 'Set parameters before timing',
'Start and stop time': 'Start and stop time',
'Please select time': 'Please select time',
'Please enter crontab': 'Please enter crontab',
none_1: 'none',
success_1: 'success',
failure_1: 'failure',
All_1: 'All',
Toolbar: 'Toolbar',
'View variables': 'View variables',
'Format DAG': 'Format DAG',
'Refresh DAG status': 'Refresh DAG status',
Return_1: 'Return',
'Please enter format': 'Please enter format',
'connection parameter': 'connection parameter',
'Process definition details': 'Process definition details',
'Create process definition': 'Create process definition',
'Scheduled task list': 'Scheduled task list',
'Process instance details': 'Process instance details',
'Create Resource': 'Create Resource',
'User Center': 'User Center',
AllStatus: 'All',
None: 'None',
Name: 'Name',
'Process priority': 'Process priority',
'Task priority': 'Task priority',
'Task timeout alarm': 'Task timeout alarm',
'Timeout strategy': 'Timeout strategy',
'Timeout alarm': 'Timeout alarm',
'Timeout failure': 'Timeout failure',
'Timeout period': 'Timeout period',
'Waiting Dependent complete': 'Waiting Dependent complete',
'Waiting Dependent start': 'Waiting Dependent start',
'Check interval': 'Check interval',
'Timeout must be longer than check interval': 'Timeout must be longer than check interval',
'Timeout strategy must be selected': 'Timeout strategy must be selected',
'Timeout must be a positive integer': 'Timeout must be a positive integer',
'Add dependency': 'Add dependency',
'Whether dry-run': 'Whether dry-run',
and: 'and',
or: 'or',
month: 'month',
week: 'week',
day: 'day',
hour: 'hour',
Running: 'Running',
'Waiting for dependency to complete': 'Waiting for dependency to complete',
Selected: 'Selected',
CurrentHour: 'CurrentHour',
Last1Hour: 'Last1Hour',
Last2Hours: 'Last2Hours',
Last3Hours: 'Last3Hours',
Last24Hours: 'Last24Hours',
today: 'today',
Last1Days: 'Last1Days',
Last2Days: 'Last2Days',
Last3Days: 'Last3Days',
Last7Days: 'Last7Days',
ThisWeek: 'ThisWeek',
LastWeek: 'LastWeek',
LastMonday: 'LastMonday',
LastTuesday: 'LastTuesday',
LastWednesday: 'LastWednesday',
LastThursday: 'LastThursday',
LastFriday: 'LastFriday',
LastSaturday: 'LastSaturday',
LastSunday: 'LastSunday',
ThisMonth: 'ThisMonth',
LastMonth: 'LastMonth',
LastMonthBegin: 'LastMonthBegin',
LastMonthEnd: 'LastMonthEnd',
'Refresh status succeeded': 'Refresh status succeeded',
'Queue manage': 'Yarn Queue manage',
'Create queue': 'Create queue',
'Edit queue': 'Edit queue',
'Datasource manage': 'Datasource',
'History task record': 'History task record',
'Please go online': 'Please go online',
'Queue value': 'Queue value',
'Please enter queue value': 'Please enter queue value',
'Worker group manage': 'Worker group manage',
'Create worker group': 'Create worker group',
'Edit worker group': 'Edit worker group',
'Token manage': 'Token manage',
'Create token': 'Create token',
'Edit token': 'Edit token',
Addresses: 'Addresses',
'Worker Addresses': 'Worker Addresses',
'Please select the worker addresses': 'Please select the worker addresses',
'Failure time': 'Failure time',
'Expiration time': 'Expiration time',
User: 'User',
'Please enter token': 'Please enter token',
'Generate token': 'Generate token',
Monitor: 'Monitor',
Group: 'Group',
'Queue statistics': 'Queue statistics',
'Command status statistics': 'Command status statistics',
'Task kill': 'Task Kill',
'Task queue': 'Task queue',
'Error command count': 'Error command count',
'Normal command count': 'Normal command count',
Manage: ' Manage',
'Number of connections': 'Number of connections',
Sent: 'Sent',
Received: 'Received',
'Min latency': 'Min latency',
'Avg latency': 'Avg latency',
'Max latency': 'Max latency',
'Node count': 'Node count',
'Query time': 'Query time',
'Node self-test status': 'Node self-test status',
'Health status': 'Health status',
'Max connections': 'Max connections',
'Threads connections': 'Threads connections',
'Max used connections': 'Max used connections',
'Threads running connections': 'Threads running connections',
'Worker group': 'Worker group',
'Please enter a positive integer greater than 0': 'Please enter a positive integer greater than 0',
'Pre Statement': 'Pre Statement',
'Post Statement': 'Post Statement',
'Statement cannot be empty': 'Statement cannot be empty',
'Process Define Count': 'Work flow Define Count',
'Process Instance Running Count': 'Process Instance Running Count',
'command number of waiting for running': 'command number of waiting for running',
'failure command number': 'failure command number',
'tasks number of waiting running': 'tasks number of waiting running',
'task number of ready to kill': 'task number of ready to kill',
'Statistics manage': 'Statistics Manage',
statistics: 'Statistics',
'select tenant': 'select tenant',
'Please enter Principal': 'Please enter Principal',
'Please enter the kerberos authentication parameter java.security.krb5.conf': 'Please enter the kerberos authentication parameter java.security.krb5.conf',
'Please enter the kerberos authentication parameter login.user.keytab.username': 'Please enter the kerberos authentication parameter login.user.keytab.username',
'Please enter the kerberos authentication parameter login.user.keytab.path': 'Please enter the kerberos authentication parameter login.user.keytab.path',
'The start time must not be the same as the end': 'The start time must not be the same as the end',
'Startup parameter': 'Startup parameter',
'Startup type': 'Startup type',
'warning of timeout': 'warning of timeout',
'Next five execution times': 'Next five execution times',
'Execute time': 'Execute time',
'Complement range': 'Complement range',
'Http Url': 'Http Url',
'Http Method': 'Http Method',
'Http Parameters': 'Http Parameters',
'Http Parameters Key': 'Http Parameters Key',
'Http Parameters Position': 'Http Parameters Position',
'Http Parameters Value': 'Http Parameters Value',
'Http Check Condition': 'Http Check Condition',
'Http Condition': 'Http Condition',
'Please Enter Http Url': 'Please Enter Http Url(required)',
'Please Enter Http Condition': 'Please Enter Http Condition',
'There is no data for this period of time': 'There is no data for this period of time',
'Worker addresses cannot be empty': 'Worker addresses cannot be empty',
'Please generate token': 'Please generate token',
'Please Select token': 'Please select the expiration time of token',
'Spark Version': 'Spark Version',
TargetDataBase: 'target database',
TargetTable: 'target table',
TargetJobName: 'target job name',
'Please enter Pigeon job name': 'Please enter Pigeon job name',
'Please enter the table of target': 'Please enter the table of target',
'Please enter a Target Table(required)': 'Please enter a Target Table(required)',
SpeedByte: 'speed(byte count)',
SpeedRecord: 'speed(record count)',
'0 means unlimited by byte': '0 means unlimited',
'0 means unlimited by count': '0 means unlimited',
'Modify User': 'Modify User',
'Whether directory': 'Whether directory',
Yes: 'Yes',
No: 'No',
'Hadoop Custom Params': 'Hadoop Params',
'Sqoop Advanced Parameters': 'Sqoop Params',
'Sqoop Job Name': 'Job Name',
'Please enter Mysql Database(required)': 'Please enter Mysql Database(required)',
'Please enter Mysql Table(required)': 'Please enter Mysql Table(required)',
'Please enter Columns (Comma separated)': 'Please enter Columns (Comma separated)',
'Please enter Target Dir(required)': 'Please enter Target Dir(required)',
'Please enter Export Dir(required)': 'Please enter Export Dir(required)',
'Please enter Hive Database(required)': 'Please enter Hive Databasec(required)',
'Please enter Hive Table(required)': 'Please enter Hive Table(required)',
'Please enter hive target dir': 'Please enter hive target dir',
'Please enter Hive Partition Keys': 'Please enter Hive Partition Key',
'Please enter Hive Partition Values': 'Please enter Partition Value',
'Please enter Replace Delimiter': 'Please enter Replace Delimiter',
'Please enter Fields Terminated': 'Please enter Fields Terminated',
'Please enter Lines Terminated': 'Please enter Lines Terminated',
'Please enter Concurrency': 'Please enter Concurrency',
'Please enter Update Key': 'Please enter Update Key',
'Please enter Job Name(required)': 'Please enter Job Name(required)',
'Please enter Custom Shell(required)': 'Please enter Custom Shell(required)',
Direct: 'Direct',
Type: 'Type',
ModelType: 'ModelType',
ColumnType: 'ColumnType',
Database: 'Database',
Column: 'Column',
'Map Column Hive': 'Map Column Hive',
'Map Column Java': 'Map Column Java',
'Export Dir': 'Export Dir',
'Hive partition Keys': 'Hive partition Keys',
'Hive partition Values': 'Hive partition Values',
FieldsTerminated: 'FieldsTerminated',
LinesTerminated: 'LinesTerminated',
IsUpdate: 'IsUpdate',
UpdateKey: 'UpdateKey',
UpdateMode: 'UpdateMode',
'Target Dir': 'Target Dir',
DeleteTargetDir: 'DeleteTargetDir',
FileType: 'FileType',
CompressionCodec: 'CompressionCodec',
CreateHiveTable: 'CreateHiveTable',
DropDelimiter: 'DropDelimiter',
OverWriteSrc: 'OverWriteSrc',
ReplaceDelimiter: 'ReplaceDelimiter',
Concurrency: 'Concurrency',
Form: 'Form',
OnlyUpdate: 'OnlyUpdate',
AllowInsert: 'AllowInsert',
'Data Source': 'Data Source',
'Data Target': 'Data Target',
'All Columns': 'All Columns',
'Some Columns': 'Some Columns',
'Branch flow': 'Branch flow',
'Custom Job': 'Custom Job',
'Custom Script': 'Custom Script',
'Cannot select the same node for successful branch flow and failed branch flow': 'Cannot select the same node for successful branch flow and failed branch flow',
'Successful branch flow and failed branch flow are required': 'conditions node Successful and failed branch flow are required',
'No resources exist': 'No resources exist',
'Please delete all non-existing resources': 'Please delete all non-existing resources',
'Unauthorized or deleted resources': 'Unauthorized or deleted resources',
'Please delete all non-existent resources': 'Please delete all non-existent resources',
Kinship: 'Workflow relationship',
Reset: 'Reset',
KinshipStateActive: 'Current selection',
KinshipState1: 'Online',
KinshipState0: 'Workflow is not online',
KinshipState10: 'Scheduling is not online',
'Dag label display control': 'Dag label display control',
Enable: 'Enable',
Disable: 'Disable',
'The Worker group no longer exists, please select the correct Worker group!': 'The Worker group no longer exists, please select the correct Worker group!',
'Please confirm whether the workflow has been saved before downloading': 'Please confirm whether the workflow has been saved before downloading',
'User name length is between 3 and 39': 'User name length is between 3 and 39',
'Timeout Settings': 'Timeout Settings',
'Connect Timeout': 'Connect Timeout',
'Socket Timeout': 'Socket Timeout',
'Connect timeout be a positive integer': 'Connect timeout must be a positive integer',
'Socket Timeout be a positive integer': 'Socket timeout must be a positive integer',
ms: 'ms',
'Please Enter Url': 'Please Enter Url eg. 127.0.0.1:7077',
Master: 'Master',
'Please select the seatunnel resources': 'Please select the seatunnel resources',
zkDirectory: 'zkDirectory',
'Directory detail': 'Directory detail',
'Connection name': 'Connection name',
'Current connection settings': 'Current connection settings',
'Please save the DAG before formatting': 'Please save the DAG before formatting',
'Batch copy': 'Batch copy',
'Related items': 'Related items',
'Project name is required': 'Project name is required',
'Batch move': 'Batch move',
Version: 'Version',
'Pre tasks': 'Pre tasks',
'Running Memory': 'Running Memory',
'Max Memory': 'Max Memory',
'Min Memory': 'Min Memory',
'The workflow canvas is abnormal and cannot be saved, please recreate': 'The workflow canvas is abnormal and cannot be saved, please recreate',
Info: 'Info',
'Datasource userName': 'owner',
'Resource userName': 'owner',
'Environment manage': 'Environment manage',
'Create environment': 'Create environment',
'Edit environment': 'Edit environment',
'Environment value': 'Environment value',
'Environment Name': 'Environment Name',
'Environment Code': 'Environment Code',
'Environment Config': 'Environment Config',
'Environment Desc': 'Environment Desc',
'Environment Worker Group': 'Worker Groups',
'Please enter environment config': 'Please enter environment config',
'Please enter environment desc': 'Please enter environment desc',
'Please select worker groups': 'Please select worker groups',
condition: 'condition',
'The condition content cannot be empty': 'The condition content cannot be empty',
'Reference from': 'Reference from',
'No more...': 'No more...',
'Task Definition': 'Task Definition',
'Create task': 'Create task',
'Task Type': 'Task Type',
'Process execute type': 'Process execute type',
parallel: 'parallel',
'Serial wait': 'Serial wait',
'Serial discard': 'Serial discard',
'Serial priority': 'Serial priority',
'Recover serial wait': 'Recover serial wait',
IsEnableProxy: 'Enable Proxy',
WebHook: 'WebHook',
webHook: 'WebHook',
Keyword: 'Keyword',
Secret: 'Secret',
Proxy: 'Proxy',
receivers: 'Receivers',
receiverCcs: 'ReceiverCcs',
transportProtocol: 'Transport Protocol',
serverHost: 'SMTP Host',
serverPort: 'SMTP Port',
sender: 'Sender',
enableSmtpAuth: 'SMTP Auth',
starttlsEnable: 'SMTP STARTTLS Enable',
sslEnable: 'SMTP SSL Enable',
smtpSslTrust: 'SMTP SSL Trust',
url: 'URL',
requestType: 'Request Type',
headerParams: 'Headers',
bodyParams: 'Body',
contentField: 'Content Field',
path: 'Script Path',
userParams: 'User Params',
corpId: 'CorpId',
secret: 'Secret',
userSendMsg: 'UserSendMsg',
agentId: 'AgentId',
users: 'Users',
Username: 'Username',
username: 'Username',
showType: 'Show Type',
'Please select a task type (required)': 'Please select a task type (required)',
layoutType: 'Layout Type',
gridLayout: 'Grid',
dagreLayout: 'Dagre',
rows: 'Rows',
cols: 'Cols',
processOnline: 'Online',
searchNode: 'Search Node',
dagScale: 'Scale',
workflowName: 'Workflow Name',
scheduleStartTime: 'Schedule Start Time',
scheduleEndTime: 'Schedule End Time',
crontabExpression: 'Crontab',
workflowPublishStatus: 'Workflow Publish Status',
schedulePublishStatus: 'Schedule Publish Status',
'Task group manage': 'Task group manage',
'Task group option': 'Task group option',
'Create task group': 'Create task group',
'Edit task group': 'Edit task group',
'Delete task group': 'Delete task group',
'Task group code': 'Task group code',
'Task group name': 'Task group name',
'Task group resource pool size': 'Resource pool size',
'Task group resource pool size be a number': 'The size of the task group resource pool should be more than 1',
'Task group resource used pool size': 'Used resource',
'Task group desc': 'Task group desc',
'Task group status': 'Task group status',
'Task group enable status': 'Enable',
'Task group disable status': 'Disable',
'Please enter task group desc': 'Please enter task group description',
'Please enter task group resource pool size': 'Please enter task group resource pool size',
'Please select project': 'Please select a project',
'Task group queue': 'Task group queue',
'Task group queue priority': 'Priority',
'Task group queue priority be a number': 'The priority of the task group queue should be a positive number',
'Task group queue force starting status': 'Starting status',
'Task group in queue': 'In queue',
'Task group queue status': 'Task status',
'View task group queue': 'View task group queue',
'Task group queue the status of waiting': 'Waiting into the queue',
'Task group queue the status of queuing': 'Queuing',
'Task group queue the status of releasing': 'Released',
'Modify task group queue priority': 'Edit the priority of the task group queue',
'Priority not empty': 'The value of priority can not be empty',
'Priority must be number': 'The value of priority should be number',
'Please select task name': 'Please select a task name',
'Process State': 'Process State',
'Upstream Tasks': 'Upstream Tasks',
'and {n} more': '… and {n} more',
'Move task': 'Move task',
'Delete task {taskName} from process {processName}?': 'Delete task {taskName} from process {processName}?',
'Delete task completely': 'Delete task completely',
'Please select a process': 'Please select a process',
'Delete {taskName}?': 'Delete {taskName}?',
'Please select a process (required)': 'Please select a process (required)'
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,138 | [Feature][Task] add at@ msg in the dingtalk task plugin | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add @ (at) message support to the DingTalk plugin.
When a custom bot sends a message, you can specify the list of people to @-mention by their mobile phone numbers. Everyone on that list receives an @ reminder when the message arrives; even Do-Not-Disturb conversations still get a notification, and "someone @ you" shows up on the folded chat.
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8138 | https://github.com/apache/dolphinscheduler/pull/8139 | e0dbf3edc6200ed2abd1dffbcfaff8b53d7996f4 | 1fd748f376ad652783c94fd7c2d9e602291acb16 | "2022-01-20T10:53:08Z" | java | "2022-01-21T09:09:39Z" | dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export default {
'User Name': '用户名',
'Please enter user name': '请输入用户名',
Password: '密码',
'Please enter your password': '请输入密码',
'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': '密码至少包含数字,字母和字符的两种组合,长度在6-22之间',
Login: '登录',
Home: '首页',
'Failed to create node to save': '未创建节点保存失败',
'Global parameters': '全局参数',
'Local parameters': '局部参数',
'Copy success': '复制成功',
'The browser does not support automatic copying': '该浏览器不支持自动复制',
'Whether to save the DAG graph': '是否保存DAG图',
'Current node settings': '当前节点设置',
'View history': '查看历史',
'View log': '查看日志',
'Force success': '强制成功',
'Enter this child node': '进入该子节点',
'Node name': '节点名称',
'Please enter name (required)': '请输入名称(必填)',
'Run flag': '运行标志',
Normal: '正常',
'Prohibition execution': '禁止执行',
'Please enter description': '请输入描述',
'Number of failed retries': '失败重试次数',
Times: '次',
'Failed retry interval': '失败重试间隔',
Minute: '分',
'Delay execution time': '延时执行时间',
'Delay execution': '延时执行',
'Forced success': '强制成功',
Cancel: '取消',
'Confirm add': '确认添加',
'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': '新创建子工作流还未执行,不能进入子工作流',
'The task has not been executed and cannot enter the sub-Process': '该任务还未执行,不能进入子工作流',
'Name already exists': '名称已存在请重新输入',
'Download Log': '下载日志',
'Refresh Log': '刷新日志',
'Enter full screen': '进入全屏',
'Cancel full screen': '取消全屏',
Close: '关闭',
'Update log success': '更新日志成功',
'No more logs': '暂无更多日志',
'No log': '暂无日志',
'Loading Log...': '正在努力请求日志中...',
'Set the DAG diagram name': '设置DAG图名称',
'Please enter description(optional)': '请输入描述(选填)',
'Set global': '设置全局',
'Whether to go online the process definition': '是否上线流程定义',
'Whether to update the process definition': '是否更新流程定义',
Add: '添加',
'DAG graph name cannot be empty': 'DAG图名称不能为空',
'Create Datasource': '创建数据源',
'Project Home': '工作流监控',
'Project Manage': '项目管理',
'Create Project': '创建项目',
'Cron Manage': '定时管理',
'Copy Workflow': '复制工作流',
'Tenant Manage': '租户管理',
'Create Tenant': '创建租户',
'User Manage': '用户管理',
'Create User': '创建用户',
'User Information': '用户信息',
'Edit Password': '密码修改',
Success: '成功',
Failed: '失败',
Delete: '删除',
'Please choose': '请选择',
'Please enter a positive integer': '请输入正整数',
'Program Type': '程序类型',
'Main Class': '主函数的Class',
'Main Package': '主程序包',
'Please enter main package': '请选择主程序包',
'Please enter main class': '请填写主函数的Class',
'Main Arguments': '主程序参数',
'Please enter main arguments': '请输入主程序参数',
'Option Parameters': '选项参数',
'Please enter option parameters': '请输入选项参数',
Resources: '资源',
'Custom Parameters': '自定义参数',
'Custom template': '自定义模版',
Datasource: '数据源',
methods: '方法',
'Please enter the procedure method': '请输入存储脚本 \n\n调用存储过程:{call <procedure-name>[(<arg1>,<arg2>, ...)]}\n\n调用存储函数:{?= call <procedure-name>[(<arg1>,<arg2>, ...)]} ',
'The procedure method script example': '示例:{call <procedure-name>[(?,?, ...)]} 或 {?= call <procedure-name>[(?,?, ...)]}',
Script: '脚本',
'Please enter script(required)': '请输入脚本(必填)',
'Deploy Mode': '部署方式',
'Driver Cores': 'Driver核心数',
'Please enter Driver cores': '请输入Driver核心数',
'Driver Memory': 'Driver内存数',
'Please enter Driver memory': '请输入Driver内存数',
'Executor Number': 'Executor数量',
'Please enter Executor number': '请输入Executor数量',
'The Executor number should be a positive integer': 'Executor数量为正整数',
'Executor Memory': 'Executor内存数',
'Please enter Executor memory': '请输入Executor内存数',
'Executor Cores': 'Executor核心数',
'Please enter Executor cores': '请输入Executor核心数',
'Memory should be a positive integer': '内存数为数字',
'Core number should be positive integer': '核心数为正整数',
'Flink Version': 'Flink版本',
'JobManager Memory': 'JobManager内存数',
'Please enter JobManager memory': '请输入JobManager内存数',
'TaskManager Memory': 'TaskManager内存数',
'Please enter TaskManager memory': '请输入TaskManager内存数',
'Slot Number': 'Slot数量',
'Please enter Slot number': '请输入Slot数量',
Parallelism: '并行度',
'Custom Parallelism': '自定义并行度',
'Please enter Parallelism': '请输入并行度',
'Parallelism number should be positive integer': '并行度必须为正整数',
'Parallelism tip': '如果存在大量任务需要补数时,可以利用自定义并行度将补数的任务线程设置成合理的数值,避免对服务器造成过大的影响',
'TaskManager Number': 'TaskManager数量',
'Please enter TaskManager number': '请输入TaskManager数量',
'App Name': '任务名称',
'Please enter app name(optional)': '请输入任务名称(选填)',
'SQL Type': 'sql类型',
'Send Email': '发送邮件',
'Log display': '日志显示',
'rows of result': '行查询结果',
Title: '主题',
'Please enter the title of email': '请输入邮件主题',
Table: '表名',
TableMode: '表格',
Attachment: '附件',
'SQL Parameter': 'sql参数',
'SQL Statement': 'sql语句',
'UDF Function': 'UDF函数',
'Please enter a SQL Statement(required)': '请输入sql语句(必填)',
'Please enter a JSON Statement(required)': '请输入json语句(必填)',
'One form or attachment must be selected': '表格、附件必须勾选一个',
'Mail subject required': '邮件主题必填',
'Child Node': '子节点',
'Please select a sub-Process': '请选择子工作流',
Edit: '编辑',
'Switch To This Version': '切换到该版本',
'Datasource Name': '数据源名称',
'Please enter datasource name': '请输入数据源名称',
IP: 'IP主机名',
'Please enter IP': '请输入IP主机名',
Port: '端口',
'Please enter port': '请输入端口',
'Database Name': '数据库名',
'Please enter database name': '请输入数据库名',
'Oracle Connect Type': '服务名或SID',
'Oracle Service Name': '服务名',
'Oracle SID': 'SID',
'jdbc connect parameters': 'jdbc连接参数',
'Test Connect': '测试连接',
'Please enter resource name': '请输入数据源名称',
'Please enter resource folder name': '请输入资源文件夹名称',
'Please enter a non-query SQL statement': '请输入非查询sql语句',
'Please enter IP/hostname': '请输入IP/主机名',
'jdbc connection parameters is not a correct JSON format': 'jdbc连接参数不是一个正确的JSON格式',
'#': '编号',
'Datasource Type': '数据源类型',
'Datasource Parameter': '数据源参数',
'Create Time': '创建时间',
'Update Time': '更新时间',
Operation: '操作',
'Current Version': '当前版本',
'Click to view': '点击查看',
'Delete?': '确定删除吗?',
'Switch Version Successfully': '切换版本成功',
'Confirm Switch To This Version?': '确定切换到该版本吗?',
Confirm: '确定',
'Task status statistics': '任务状态统计',
Number: '数量',
State: '状态',
'Dry-run flag': '空跑标识',
'Process Status Statistics': '流程状态统计',
'Process Definition Statistics': '流程定义统计',
'Project Name': '项目名称',
'Please enter name': '请输入名称',
'Owned Users': '所属用户',
'Process Pid': '进程Pid',
'Zk registration directory': 'zk注册目录',
cpuUsage: 'cpuUsage',
memoryUsage: 'memoryUsage',
'Last heartbeat time': '最后心跳时间',
'Edit Tenant': '编辑租户',
'OS Tenant Code': '操作系统租户',
'Tenant Name': '租户名称',
Queue: '队列',
'Please select a queue': '默认为租户关联队列',
'Please enter the os tenant code in English': '请输入操作系统租户只允许英文',
'Please enter os tenant code in English': '请输入英文操作系统租户',
'Please enter os tenant code': '请输入操作系统租户',
'Please enter tenant Name': '请输入租户名称',
'The os tenant code. Only letters or a combination of letters and numbers are allowed': '操作系统租户只允许字母或字母与数字组合',
'Edit User': '编辑用户',
Tenant: '租户',
Email: '邮件',
Phone: '手机',
'User Type': '用户类型',
'Please enter phone number': '请输入手机',
'Please enter email': '请输入邮箱',
'Please enter the correct email format': '请输入正确的邮箱格式',
'Please enter the correct mobile phone format': '请输入正确的手机格式',
Project: '项目',
Authorize: '授权',
'File resources': '文件资源',
'UDF resources': 'UDF资源',
'UDF resources directory': 'UDF资源目录',
'Please select UDF resources directory': '请选择UDF资源目录',
'Alarm group': '告警组',
'Alarm group required': '告警组必填',
'Edit alarm group': '编辑告警组',
'Create alarm group': '创建告警组',
'Create Alarm Instance': '创建告警实例',
'Edit Alarm Instance': '编辑告警实例',
'Group Name': '组名称',
'Alarm instance name': '告警实例名称',
'Alarm plugin name': '告警插件名称',
'Select plugin': '选择插件',
'Select Alarm plugin': '请选择告警插件',
'Please enter group name': '请输入组名称',
'Instance parameter exception': '实例参数异常',
'Group Type': '组类型',
'Alarm plugin instance': '告警插件实例',
'Please enter alarm plugin instance name': '请输入告警实例名称',
'Select Alarm plugin instance': '请选择告警插件实例',
Remarks: '备注',
SMS: '短信',
'Managing Users': '管理用户',
Permission: '权限',
Administrator: '管理员',
'Confirm Password': '确认密码',
'Please enter confirm password': '请输入确认密码',
'Password cannot be in Chinese': '密码不能为中文',
'Please enter a password (6-22) character password': '请输入密码(6-22)字符密码',
'Confirmation password cannot be in Chinese': '确认密码不能为中文',
'Please enter a confirmation password (6-22) character password': '请输入确认密码(6-22)字符密码',
'The password is inconsistent with the confirmation password': '密码与确认密码不一致,请重新确认',
'Please select the datasource': '请选择数据源',
'Please select resources': '请选择资源',
Query: '查询',
'Non Query': '非查询',
'prop(required)': 'prop(必填)',
'value(optional)': 'value(选填)',
'value(required)': 'value(必填)',
'prop is empty': '自定义参数prop不能为空',
'value is empty': 'value不能为空',
'prop is repeat': 'prop中有重复',
'Start Time': '开始时间',
'End Time': '结束时间',
crontab: 'crontab',
'Failure Strategy': '失败策略',
online: '上线',
offline: '下线',
'Task Status': '任务状态',
'Process Instance': '工作流实例',
'Task Instance': '任务实例',
'Select date range': '选择日期区间',
startDate: '开始日期',
endDate: '结束日期',
Date: '日期',
Waiting: '等待',
Execution: '执行中',
Finish: '完成',
'Create File': '创建文件',
'Create folder': '创建文件夹',
'File Name': '文件名称',
'Folder Name': '文件夹名称',
'File Format': '文件格式',
'Folder Format': '文件夹格式',
'File Content': '文件内容',
'Upload File Size': '文件大小不能超过1G',
Create: '创建',
'Please enter the resource content': '请输入资源内容',
'Resource content cannot exceed 3000 lines': '资源内容不能超过3000行',
'File Details': '文件详情',
'Download Details': '下载详情',
Return: '返回',
Save: '保存',
'File Manage': '文件管理',
'Upload Files': '上传文件',
'Create UDF Function': '创建UDF函数',
'Upload UDF Resources': '上传UDF资源',
'Service-Master': '服务管理-Master',
'Service-Worker': '服务管理-Worker',
'Process Name': '工作流名称',
Executor: '执行用户',
'Run Type': '运行类型',
'Scheduling Time': '调度时间',
'Run Times': '运行次数',
host: 'host',
'fault-tolerant sign': '容错标识',
Rerun: '重跑',
'Recovery Failed': '恢复失败',
Stop: '停止',
Pause: '暂停',
'Recovery Suspend': '恢复运行',
Gantt: '甘特图',
'Node Type': '节点类型',
'Submit Time': '提交时间',
Duration: '运行时长',
'Retry Count': '重试次数',
'Task Name': '任务名称',
'Task Date': '任务日期',
'Source Table': '源表',
'Record Number': '记录数',
'Target Table': '目标表',
'Online viewing type is not supported': '不支持在线查看类型',
Size: '大小',
Rename: '重命名',
Download: '下载',
Export: '导出',
'Version Info': '版本信息',
Submit: '提交',
'Edit UDF Function': '编辑UDF函数',
type: '类型',
'UDF Function Name': 'UDF函数名称',
FILE: '文件',
UDF: 'UDF',
'File Subdirectory': '文件子目录',
'Please enter a function name': '请输入函数名',
'Package Name': '包名类名',
'Please enter a Package name': '请输入包名类名',
Parameter: '参数',
'Please enter a parameter': '请输入参数',
'UDF Resources': 'UDF资源',
'Upload Resources': '上传资源',
Instructions: '使用说明',
'Please enter a instructions': '请输入使用说明',
'Please enter a UDF function name': '请输入UDF函数名称',
'Select UDF Resources': '请选择UDF资源',
'Class Name': '类名',
'Jar Package': 'jar包',
'Library Name': '库名',
'UDF Resource Name': 'UDF资源名称',
'File Size': '文件大小',
Description: '描述',
'Drag Nodes and Selected Items': '拖动节点和选中项',
'Select Line Connection': '选择线条连接',
'Delete selected lines or nodes': '删除选中的线或节点',
'Full Screen': '全屏',
Unpublished: '未发布',
'Start Process': '启动工作流',
'Execute from the current node': '从当前节点开始执行',
'Recover tolerance fault process': '恢复被容错的工作流',
'Resume the suspension process': '恢复运行流程',
'Execute from the failed nodes': '从失败节点开始执行',
'Complement Data': '补数',
'Scheduling execution': '调度执行',
'Recovery waiting thread': '恢复等待线程',
'Submitted successfully': '提交成功',
Executing: '正在执行',
'Ready to pause': '准备暂停',
'Ready to stop': '准备停止',
'Need fault tolerance': '需要容错',
Kill: 'Kill',
'Waiting for thread': '等待线程',
'Waiting for dependence': '等待依赖',
Start: '运行',
Copy: '复制节点',
'Copy name': '复制名称',
'Copy path': '复制路径',
'Please enter keyword': '请输入关键词',
'File Upload': '文件上传',
'Drag the file into the current upload window': '请将文件拖拽到当前上传窗口内!',
'Drag area upload': '拖动区域上传',
Upload: '上传',
'ReUpload File': '重新上传文件',
'Please enter file name': '请输入文件名',
'Please select the file to upload': '请选择要上传的文件',
'Resources manage': '资源中心',
Security: '安全中心',
Logout: '退出',
'No data': '查询无数据',
'Uploading...': '文件上传中',
'Loading...': '正在努力加载中...',
List: '列表',
'Unable to download without proper url': '无下载url无法下载',
Process: '工作流',
'Process definition': '工作流定义',
'Task record': '任务记录',
'Warning group manage': '告警组管理',
'Warning instance manage': '告警实例管理',
'Servers manage': '服务管理',
'UDF manage': 'UDF管理',
'Resource manage': '资源管理',
'Function manage': '函数管理',
'Edit password': '修改密码',
'Ordinary users': '普通用户',
'Create process': '创建工作流',
'Import process': '导入工作流',
'Timing state': '定时状态',
Timing: '定时',
Timezone: '时区',
TreeView: '树形图',
'Mailbox already exists! Recipients and copyers cannot repeat': '邮箱已存在!收件人和抄送人不能重复',
'Mailbox input is illegal': '邮箱输入不合法',
'Please set the parameters before starting': '启动前请先设置参数',
Continue: '继续',
End: '结束',
'Node execution': '节点执行',
'Backward execution': '向后执行',
'Forward execution': '向前执行',
'Execute only the current node': '仅执行当前节点',
'Notification strategy': '通知策略',
'Notification group': '通知组',
'Please select a notification group': '请选择通知组',
'Whether it is a complement process?': '是否补数',
'Schedule date': '调度日期',
'Mode of execution': '执行方式',
'Serial execution': '串行执行',
'Parallel execution': '并行执行',
'Set parameters before timing': '定时前请先设置参数',
'Start and stop time': '起止时间',
'Please select time': '请选择时间',
'Please enter crontab': '请输入crontab',
none_1: '都不发',
success_1: '成功发',
failure_1: '失败发',
All_1: '成功或失败都发',
Toolbar: '工具栏',
'View variables': '查看变量',
'Format DAG': '格式化DAG',
'Refresh DAG status': '刷新DAG状态',
Return_1: '返回上一节点',
'Please enter format': '请输入格式为',
'connection parameter': '连接参数',
'Process definition details': '流程定义详情',
'Create process definition': '创建流程定义',
'Scheduled task list': '定时任务列表',
'Process instance details': '流程实例详情',
'Create Resource': '创建资源',
'User Center': '用户中心',
AllStatus: '全部状态',
None: '无',
Name: '名称',
'Process priority': '流程优先级',
'Task priority': '任务优先级',
'Task timeout alarm': '任务超时告警',
'Timeout strategy': '超时策略',
'Timeout alarm': '超时告警',
'Timeout failure': '超时失败',
'Timeout period': '超时时长',
'Waiting Dependent complete': '等待依赖完成',
'Waiting Dependent start': '等待依赖启动',
'Check interval': '检查间隔',
'Timeout must be longer than check interval': '超时时间必须比检查间隔长',
'Timeout strategy must be selected': '超时策略必须选一个',
'Timeout must be a positive integer': '超时时长必须为正整数',
'Add dependency': '添加依赖',
'Whether dry-run': '是否空跑',
and: '且',
or: '或',
month: '月',
week: '周',
day: '日',
hour: '时',
Running: '正在运行',
'Waiting for dependency to complete': '等待依赖完成',
Selected: '已选',
CurrentHour: '当前小时',
Last1Hour: '前1小时',
Last2Hours: '前2小时',
Last3Hours: '前3小时',
Last24Hours: '前24小时',
today: '今天',
Last1Days: '昨天',
Last2Days: '前两天',
Last3Days: '前三天',
Last7Days: '前七天',
ThisWeek: '本周',
LastWeek: '上周',
LastMonday: '上周一',
LastTuesday: '上周二',
LastWednesday: '上周三',
LastThursday: '上周四',
LastFriday: '上周五',
LastSaturday: '上周六',
LastSunday: '上周日',
ThisMonth: '本月',
LastMonth: '上月',
LastMonthBegin: '上月初',
LastMonthEnd: '上月末',
'Refresh status succeeded': '刷新状态成功',
'Queue manage': 'Yarn 队列管理',
'Create queue': '创建队列',
'Edit queue': '编辑队列',
'Datasource manage': '数据源中心',
'History task record': '历史任务记录',
'Please go online': '不要忘记上线',
'Queue value': '队列值',
'Please enter queue value': '请输入队列值',
'Worker group manage': 'Worker分组管理',
'Create worker group': '创建Worker分组',
'Edit worker group': '编辑Worker分组',
'Token manage': '令牌管理',
'Create token': '创建令牌',
'Edit token': '编辑令牌',
Addresses: '地址',
'Worker Addresses': 'Worker地址',
'Please select the worker addresses': '请选择Worker地址',
'Failure time': '失效时间',
'Expiration time': '失效时间',
User: '用户',
'Please enter token': '请输入令牌',
'Generate token': '生成令牌',
Monitor: '监控中心',
Group: '分组',
'Queue statistics': '队列统计',
'Command status statistics': '命令状态统计',
'Task kill': '等待kill任务',
'Task queue': '等待执行任务',
'Error command count': '错误指令数',
'Normal command count': '正确指令数',
Manage: '管理',
'Number of connections': '连接数',
Sent: '发送量',
Received: '接收量',
'Min latency': '最低延时',
'Avg latency': '平均延时',
'Max latency': '最大延时',
'Node count': '节点数',
'Query time': '当前查询时间',
'Node self-test status': '节点自检状态',
'Health status': '健康状态',
'Max connections': '最大连接数',
'Threads connections': '当前连接数',
'Max used connections': '同时使用连接最大数',
'Threads running connections': '数据库当前活跃连接数',
'Worker group': 'Worker分组',
'Please enter a positive integer greater than 0': '请输入大于 0 的正整数',
'Pre Statement': '前置sql',
'Post Statement': '后置sql',
'Statement cannot be empty': '语句不能为空',
'Process Define Count': '工作流定义数',
'Process Instance Running Count': '正在运行的流程数',
'command number of waiting for running': '待执行的命令数',
'failure command number': '执行失败的命令数',
'tasks number of waiting running': '待运行任务数',
'task number of ready to kill': '待杀死任务数',
'Statistics manage': '统计管理',
statistics: '统计',
'select tenant': '选择租户',
'Please enter Principal': '请输入Principal',
'Please enter the kerberos authentication parameter java.security.krb5.conf': '请输入kerberos认证参数 java.security.krb5.conf',
'Please enter the kerberos authentication parameter login.user.keytab.username': '请输入kerberos认证参数 login.user.keytab.username',
'Please enter the kerberos authentication parameter login.user.keytab.path': '请输入kerberos认证参数 login.user.keytab.path',
'The start time must not be the same as the end': '开始时间和结束时间不能相同',
'Startup parameter': '启动参数',
'Startup type': '启动类型',
'warning of timeout': '超时告警',
'Next five execution times': '接下来五次执行时间',
'Execute time': '执行时间',
'Complement range': '补数范围',
'Http Url': '请求地址',
'Http Method': '请求类型',
'Http Parameters': '请求参数',
'Http Parameters Key': '参数名',
'Http Parameters Position': '参数位置',
'Http Parameters Value': '参数值',
'Http Check Condition': '校验条件',
'Http Condition': '校验内容',
'Please Enter Http Url': '请填写请求地址(必填)',
'Please Enter Http Condition': '请填写校验内容',
'There is no data for this period of time': '该时间段无数据',
'Worker addresses cannot be empty': 'Worker地址不能为空',
'Please generate token': '请生成Token',
'Please Select token': '请选择Token失效时间',
'Spark Version': 'Spark版本',
TargetDataBase: '目标库',
TargetTable: '目标表',
TargetJobName: '目标任务名',
'Please enter Pigeon job name': '请输入Pigeon任务名',
'Please enter the table of target': '请输入目标表名',
'Please enter a Target Table(required)': '请输入目标表(必填)',
SpeedByte: '限流(字节数)',
SpeedRecord: '限流(记录数)',
'0 means unlimited by byte': 'KB,0代表不限制',
'0 means unlimited by count': '0代表不限制',
'Modify User': '修改用户',
'Whether directory': '是否文件夹',
Yes: '是',
No: '否',
'Hadoop Custom Params': 'Hadoop参数',
'Sqoop Advanced Parameters': 'Sqoop参数',
'Sqoop Job Name': '任务名称',
'Please enter Mysql Database(required)': '请输入Mysql数据库(必填)',
'Please enter Mysql Table(required)': '请输入Mysql表名(必填)',
'Please enter Columns (Comma separated)': '请输入列名,用 , 隔开',
'Please enter Target Dir(required)': '请输入目标路径(必填)',
'Please enter Export Dir(required)': '请输入数据源路径(必填)',
'Please enter Hive Database(required)': '请输入Hive数据库(必填)',
'Please enter Hive Table(required)': '请输入Hive表名(必填)',
'Please enter hive target dir': '请输入Hive临时目录',
'Please enter Hive Partition Keys': '请输入分区键',
'Please enter Hive Partition Values': '请输入分区值',
'Please enter Replace Delimiter': '请输入替换分隔符',
'Please enter Fields Terminated': '请输入列分隔符',
'Please enter Lines Terminated': '请输入行分隔符',
'Please enter Concurrency': '请输入并发度',
'Please enter Update Key': '请输入更新列',
'Please enter Job Name(required)': '请输入任务名称(必填)',
'Please enter Custom Shell(required)': '请输入自定义脚本',
Direct: '流向',
Type: '类型',
ModelType: '模式',
ColumnType: '列类型',
Database: '数据库',
Column: '列',
'Map Column Hive': 'Hive类型映射',
'Map Column Java': 'Java类型映射',
'Export Dir': '数据源路径',
'Hive partition Keys': 'Hive 分区键',
'Hive partition Values': 'Hive 分区值',
FieldsTerminated: '列分隔符',
LinesTerminated: '行分隔符',
IsUpdate: '是否更新',
UpdateKey: '更新列',
UpdateMode: '更新类型',
'Target Dir': '目标路径',
DeleteTargetDir: '是否删除目录',
FileType: '保存格式',
CompressionCodec: '压缩类型',
CreateHiveTable: '是否创建新表',
DropDelimiter: '是否删除分隔符',
OverWriteSrc: '是否覆盖数据源',
ReplaceDelimiter: '替换分隔符',
Concurrency: '并发度',
Form: '表单',
OnlyUpdate: '只更新',
AllowInsert: '无更新便插入',
'Data Source': '数据来源',
'Data Target': '数据目的',
'All Columns': '全表导入',
'Some Columns': '选择列',
'Branch flow': '分支流转',
'Custom Job': '自定义任务',
'Custom Script': '自定义脚本',
'Cannot select the same node for successful branch flow and failed branch flow': '成功分支流转和失败分支流转不能选择同一个节点',
'Successful branch flow and failed branch flow are required': 'conditions节点成功和失败分支流转必填',
'No resources exist': '不存在资源',
'Please delete all non-existing resources': '请删除所有不存在资源',
'Unauthorized or deleted resources': '未授权或已删除资源',
'Please delete all non-existent resources': '请删除所有未授权或已删除资源',
Kinship: '工作流关系',
Reset: '重置',
KinshipStateActive: '当前选择',
KinshipState1: '已上线',
KinshipState0: '工作流未上线',
KinshipState10: '调度未上线',
'Dag label display control': 'Dag节点名称显隐',
Enable: '启用',
Disable: '停用',
'The Worker group no longer exists, please select the correct Worker group!': '该Worker分组已经不存在,请选择正确的Worker分组!',
'Please confirm whether the workflow has been saved before downloading': '下载前请确定工作流是否已保存',
'User name length is between 3 and 39': '用户名长度在3~39之间',
'Timeout Settings': '超时设置',
'Connect Timeout': '连接超时',
'Socket Timeout': 'Socket超时',
'Connect timeout be a positive integer': '连接超时必须为数字',
'Socket Timeout be a positive integer': 'Socket超时必须为数字',
ms: '毫秒',
'Please Enter Url': '请直接填写地址,例如:127.0.0.1:7077',
Master: 'Master',
'Please select the seatunnel resources': '请选择 seatunnel 配置文件',
zkDirectory: 'zk注册目录',
'Directory detail': '查看目录详情',
'Connection name': '连线名',
'Current connection settings': '当前连线设置',
'Please save the DAG before formatting': '格式化前请先保存DAG',
'Batch copy': '批量复制',
'Related items': '关联项目',
'Project name is required': '项目名称必填',
'Batch move': '批量移动',
Version: '版本',
'Pre tasks': '前置任务',
'Running Memory': '运行内存',
'Max Memory': '最大内存',
'Min Memory': '最小内存',
'The workflow canvas is abnormal and cannot be saved, please recreate': '该工作流画布异常,无法保存,请重新创建',
Info: '提示',
'Datasource userName': '所属用户',
'Resource userName': '所属用户',
'Environment manage': '环境管理',
'Create environment': '创建环境',
'Edit environment': '编辑',
'Environment value': 'Environment value',
'Environment Name': '环境名称',
'Environment Code': '环境编码',
'Environment Config': '环境配置',
'Environment Desc': '详细描述',
'Environment Worker Group': 'Worker组',
'Please enter environment config': '请输入环境配置信息',
'Please enter environment desc': '请输入详细描述',
'Please select worker groups': '请选择Worker分组',
condition: '条件',
'The condition content cannot be empty': '条件内容不能为空',
'Reference from': '使用已有任务',
'No more...': '没有更多了...',
'Task Definition': '任务定义',
'Create task': '创建任务',
'Task Type': '任务类型',
'Process execute type': '执行策略',
parallel: '并行',
'Serial wait': '串行等待',
'Serial discard': '串行抛弃',
'Serial priority': '串行优先',
'Recover serial wait': '串行恢复',
IsEnableProxy: '启用代理',
WebHook: 'Web钩子',
webHook: 'Web钩子',
Keyword: '关键词',
Secret: '密钥',
Proxy: '代理',
receivers: '收件人',
receiverCcs: '抄送人',
transportProtocol: '邮件协议',
serverHost: 'SMTP服务器',
serverPort: 'SMTP端口',
sender: '发件人',
enableSmtpAuth: '请求认证',
starttlsEnable: 'STARTTLS连接',
sslEnable: 'SSL连接',
smtpSslTrust: 'SSL证书信任',
url: 'URL',
requestType: '请求方式',
headerParams: '请求头',
bodyParams: '请求体',
contentField: '内容字段',
path: '脚本路径',
userParams: '自定义参数',
corpId: '企业ID',
secret: '密钥',
teamSendMsg: '群发信息',
userSendMsg: '群员信息',
agentId: '应用ID',
users: '群员',
Username: '用户名',
username: '用户名',
showType: '内容展示类型',
'Please select a task type (required)': '请选择任务类型(必选)',
layoutType: '布局类型',
gridLayout: '网格布局',
dagreLayout: '层次布局',
rows: '行数',
cols: '列数',
processOnline: '已上线',
searchNode: '搜索节点',
dagScale: '缩放',
workflowName: '工作流名称',
scheduleStartTime: '定时开始时间',
scheduleEndTime: '定时结束时间',
crontabExpression: 'Crontab',
workflowPublishStatus: '工作流上线状态',
schedulePublishStatus: '定时状态',
'Task group manage': '任务组管理',
'Task group option': '任务组配置',
'Create task group': '创建任务组',
'Edit task group': '编辑任务组',
'Delete task group': '删除任务组',
'Task group code': '任务组编号',
'Task group name': '任务组名称',
'Task group resource pool size': '资源容量',
'Task group resource used pool size': '已用资源',
'Task group desc': '描述信息',
'Task group status': '任务组状态',
'Task group enable status': '启用',
'Task group disable status': '不可用',
'Please enter task group desc': '请输入任务组描述',
'Please enter task group resource pool size': '请输入资源容量大小',
'Task group resource pool size be a number': '资源容量大小必须大于等于1的数值',
'Please select project': '请选择项目',
'Task group queue': '任务组队列',
'Task group queue priority': '组内优先级',
'Task group queue priority be a number': '优先级必须是大于等于0的数值',
'Task group queue force starting status': '是否强制启动',
'Task group in queue': '是否排队中',
'Task group queue status': '任务状态',
'View task group queue': '查看任务组队列',
'Task group queue the status of waiting': '等待入队',
'Task group queue the status of queuing': '排队中',
'Task group queue the status of releasing': '已释放',
'Modify task group queue priority': '修改优先级',
'Force to start task': '强制启动',
'Priority not empty': '优先级不能为空',
'Priority must be number': '优先级必须是数值',
'Please select task name': '请选择节点名称',
'Process State': '工作流状态',
'Upstream Tasks': '上游任务',
'and {n} more': '…等{n}个',
'Move task': '移动任务',
'Delete task {taskName} from process {processName}?': '将任务 {taskName} 从工作流 {processName} 中删除?',
'Delete task completely': '彻底删除任务',
'Please select a process': '请选择工作流',
'Delete {taskName}?': '确定删除 {taskName} ?',
'Please select a process (required)': '请选择工作流(必选)'
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,158 | [Improvement][UI-Next] Need to alter the structure of directories and folder's name about the component of task group. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Need to use spinal-case when naming folders in the 'ui-next' module.
![image](https://user-images.githubusercontent.com/4928204/150617820-de40ce32-f18b-4215-9286-209c8cfa06eb.png)
### Use case
Change the name of 'taskGroupOption' into 'task-group-option'
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8158 | https://github.com/apache/dolphinscheduler/pull/8159 | 8c65b194c7614943d3ba0f310fbc0da1ea692f0e | d379137908c42e945a1ab07318abac6714d599df | "2022-01-22T01:04:37Z" | java | "2022-01-22T01:34:05Z" | dolphinscheduler-ui-next/src/router/modules/resources.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import type { Component } from 'vue'
import utils from '@/utils'
// All TSX files under the views folder automatically generate mapping relationship
const modules = import.meta.glob('/src/views/**/**.tsx')
const components: { [key: string]: Component } = utils.mapping(modules)
export default {
path: '/resource',
name: 'resource',
redirect: { name: 'file-manage' },
meta: { title: '资源中心' },
component: () => import('@/layouts/content'),
children: [
{
path: '/resource/file-manage',
name: 'file-manage',
component: components['resource-file'],
meta: {
title: '文件管理',
showSide: true
}
},
{
path: '/resource/file/create',
name: 'resource-file-create',
component: components['resource-file-create'],
meta: {
title: '文件创建',
showSide: true
}
},
{
path: '/resource/file/edit/:id',
name: 'resource-file-edit',
component: components['resource-file-edit'],
meta: {
title: '文件编辑',
showSide: true
}
},
{
path: '/resource/file/subdirectory/:id',
name: 'resource-file-subdirectory',
component: components['resource-file'],
meta: {
title: '文件管理',
showSide: true
}
},
{
path: '/resource/file/list/:id',
name: 'resource-file-list',
component: components['resource-file-edit'],
meta: {
title: '文件详情',
showSide: true
}
},
{
path: '/resource/file/create/:id',
name: 'resource-subfile-create',
component: components['resource-file-create'],
meta: {
title: '文件创建',
showSide: true
}
},
{
path: '/resource/resource-manage',
name: 'resource-manage',
component: components['resource'],
meta: {
title: '资源管理',
showSide: true
}
},
{
path: '/resource/resource-manage/:id',
name: 'resource-sub-manage',
component: components['resource'],
meta: {
title: '资源管理',
showSide: true
}
},
{
path: '/resource/function-manage',
name: 'function-manage',
component: components['resource-udf-function'],
meta: {
title: '函数管理'
}
},
{
path: '/resource/task-group',
name: 'task-group-manage',
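      // note: these component keys mirror the camelCase folder name 'taskGroupOption';
      // the spinal-case rename tracked by this issue would presumably change them to
      // 'resource-task-group-option'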
component: components['resource-taskGroupOption'],
children: [
{
path: '/resource/task-group-option',
name: 'task-group-option',
component: components['resource-taskGroupOption']
},
{
path: '/resource/task-group-queue',
name: 'task-group-queue',
component: components['resource-taskGroupQueue']
}
]
}
]
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,158 | [Improvement][UI-Next] Need to alter the structure of directories and folder's name about the component of task group. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Need to use spinal-case when naming folders in the 'ui-next' module.
![image](https://user-images.githubusercontent.com/4928204/150617820-de40ce32-f18b-4215-9286-209c8cfa06eb.png)
### Use case
Change the name of 'taskGroupOption' into 'task-group-option'
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8158 | https://github.com/apache/dolphinscheduler/pull/8159 | 8c65b194c7614943d3ba0f310fbc0da1ea692f0e | d379137908c42e945a1ab07318abac6714d599df | "2022-01-22T01:04:37Z" | java | "2022-01-22T01:34:05Z" | dolphinscheduler-ui-next/src/views/resource/task-group/option/components/form-modal.tsx | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,158 | [Improvement][UI-Next] Need to alter the structure of directories and folder's name about the component of task group. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Need to use spinal-case when naming folders in the 'ui-next' module.
![image](https://user-images.githubusercontent.com/4928204/150617820-de40ce32-f18b-4215-9286-209c8cfa06eb.png)
### Use case
Change the name of 'taskGroupOption' into 'task-group-option'
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8158 | https://github.com/apache/dolphinscheduler/pull/8159 | 8c65b194c7614943d3ba0f310fbc0da1ea692f0e | d379137908c42e945a1ab07318abac6714d599df | "2022-01-22T01:04:37Z" | java | "2022-01-22T01:34:05Z" | dolphinscheduler-ui-next/src/views/resource/task-group/option/components/table-action.tsx | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,158 | [Improvement][UI-Next] Need to alter the structure of directories and folder's name about the component of task group. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Need to use spinal-case when naming folders in the 'ui-next' module.
![image](https://user-images.githubusercontent.com/4928204/150617820-de40ce32-f18b-4215-9286-209c8cfa06eb.png)
### Use case
Change the name of 'taskGroupOption' into 'task-group-option'
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8158 | https://github.com/apache/dolphinscheduler/pull/8159 | 8c65b194c7614943d3ba0f310fbc0da1ea692f0e | d379137908c42e945a1ab07318abac6714d599df | "2022-01-22T01:04:37Z" | java | "2022-01-22T01:34:05Z" | dolphinscheduler-ui-next/src/views/resource/task-group/option/index.module.scss | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,158 | [Improvement][UI-Next] Need to alter the structure of directories and folder's name about the component of task group. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Need to use spinal-case when naming folders in the 'ui-next' module.
![image](https://user-images.githubusercontent.com/4928204/150617820-de40ce32-f18b-4215-9286-209c8cfa06eb.png)
### Use case
Change the name of 'taskGroupOption' into 'task-group-option'
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8158 | https://github.com/apache/dolphinscheduler/pull/8159 | 8c65b194c7614943d3ba0f310fbc0da1ea692f0e | d379137908c42e945a1ab07318abac6714d599df | "2022-01-22T01:04:37Z" | java | "2022-01-22T01:34:05Z" | dolphinscheduler-ui-next/src/views/resource/task-group/option/index.tsx | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,158 | [Improvement][UI-Next] Need to alter the structure of directories and folder's name about the component of task group. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Need to use spinal-case when naming folders in the 'ui-next' module.
![image](https://user-images.githubusercontent.com/4928204/150617820-de40ce32-f18b-4215-9286-209c8cfa06eb.png)
### Use case
Change the name of 'taskGroupOption' into 'task-group-option'
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8158 | https://github.com/apache/dolphinscheduler/pull/8159 | 8c65b194c7614943d3ba0f310fbc0da1ea692f0e | d379137908c42e945a1ab07318abac6714d599df | "2022-01-22T01:04:37Z" | java | "2022-01-22T01:34:05Z" | dolphinscheduler-ui-next/src/views/resource/task-group/option/use-form.ts | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,158 | [Improvement][UI-Next] Need to alter the structure of directories and folder's name about the component of task group. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Need to use spinal-case when naming folders in the 'ui-next' module.
![image](https://user-images.githubusercontent.com/4928204/150617820-de40ce32-f18b-4215-9286-209c8cfa06eb.png)
### Use case
Change the name of 'taskGroupOption' into 'task-group-option'
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8158 | https://github.com/apache/dolphinscheduler/pull/8159 | 8c65b194c7614943d3ba0f310fbc0da1ea692f0e | d379137908c42e945a1ab07318abac6714d599df | "2022-01-22T01:04:37Z" | java | "2022-01-22T01:34:05Z" | dolphinscheduler-ui-next/src/views/resource/task-group/option/use-table.ts | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,182 | [Bug] [plugin] python task execute error | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
PYTHON_HOME configuration is as follows:
export PYTHON_HOME=/usr/bin/python3
When the task runs, it fails with an error saying the relevant python module cannot be found:
![image](https://user-images.githubusercontent.com/18095523/150887223-b3dcd8cd-9b6e-414d-8cc0-0729d43edf34.png)
Scripts run correctly on the server.
![image](https://user-images.githubusercontent.com/18095523/150887285-22026771-966a-47bc-9556-95f4aa27ed56.png)
If I change "python" to "python3", the python task will run successfully.
![image](https://user-images.githubusercontent.com/18095523/150887368-811ff7d9-6acb-4d19-889d-94fa2f854980.png)
or
Delete the python soft link on the server, create a new soft link and point to python3, and the task can also run successfully.
![image](https://user-images.githubusercontent.com/18095523/150887617-5da1cc0a-fbdf-43ac-858f-f71350bb4b53.png)
![image](https://user-images.githubusercontent.com/18095523/150887651-0683d1f6-8c96-49df-a326-71da67cbc05a.png)
### What you expected to happen
Python tasks can be executed normally.
### How to reproduce
See "What happened"
### Anything else
_No response_
### Version
2.0.3
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8182 | https://github.com/apache/dolphinscheduler/pull/8196 | fb9f4a1339b31fdfcf43828cbe024ac11d8392d1 | a1cc2fd11ac97b2bace6977f946c0bc0480cc620 | "2022-01-25T00:28:28Z" | java | "2022-01-26T03:09:57Z" | dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/main/java/org/apache/dolphinscheduler/plugin/task/python/PythonTask.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.python;
import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor;
import org.apache.dolphinscheduler.plugin.task.api.ShellCommandExecutor;
import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskResponse;
import org.apache.dolphinscheduler.plugin.task.util.MapUtils;
import org.apache.dolphinscheduler.spi.task.AbstractParameters;
import org.apache.dolphinscheduler.spi.task.Property;
import org.apache.dolphinscheduler.spi.task.TaskConstants;
import org.apache.dolphinscheduler.spi.task.paramparser.ParamUtils;
import org.apache.dolphinscheduler.spi.task.paramparser.ParameterUtils;
import org.apache.dolphinscheduler.spi.task.request.TaskRequest;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.commons.io.FileUtils;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
/**
* python task
*/
public class PythonTask extends AbstractTaskExecutor {
/**
* python parameters
*/
private PythonParameters pythonParameters;
/**
* shell command executor
*/
private ShellCommandExecutor shellCommandExecutor;
private TaskRequest taskRequest;
/**
* constructor
*
* @param taskRequest taskRequest
*/
public PythonTask(TaskRequest taskRequest) {
super(taskRequest);
this.taskRequest = taskRequest;
this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle,
taskRequest,
logger);
}
@Override
public void init() {
logger.info("python task params {}", taskRequest.getTaskParams());
pythonParameters = JSONUtils.parseObject(taskRequest.getTaskParams(), PythonParameters.class);
if (!pythonParameters.checkParameters()) {
throw new TaskException("python task params is not valid");
}
}
@Override
public String getPreScript() {
String rawPythonScript = pythonParameters.getRawScript().replaceAll("\\r\\n", "\n");
try {
rawPythonScript = convertPythonScriptPlaceholders(rawPythonScript);
} catch (StringIndexOutOfBoundsException e) {
logger.error("setShareVar field format error, raw python script : {}", rawPythonScript);
}
return rawPythonScript;
}
@Override
public void handle() throws Exception {
try {
// generate the content of this python script
String pythonScriptContent = buildPythonScriptContent();
// generate the file path of this python script
String pythonScriptFile = buildPythonCommandFilePath();
// create this file
createPythonCommandFileIfNotExists(pythonScriptContent,pythonScriptFile);
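            // note: the interpreter is hardcoded as "python" here; the PYTHON_HOME
            // environment variable reported in the issue above is never consulted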
String command = "python " + pythonScriptFile;
TaskResponse taskResponse = shellCommandExecutor.run(command);
setExitStatusCode(taskResponse.getExitStatusCode());
setAppIds(taskResponse.getAppIds());
setProcessId(taskResponse.getProcessId());
setVarPool(shellCommandExecutor.getVarPool());
} catch (Exception e) {
logger.error("python task failure", e);
setExitStatusCode(TaskConstants.EXIT_CODE_FAILURE);
throw new TaskException("run python task error", e);
}
}
@Override
public void cancelApplication(boolean cancelApplication) throws Exception {
// cancel process
shellCommandExecutor.cancelApplication();
}
@Override
public AbstractParameters getParameters() {
return pythonParameters;
}
/**
* convertPythonScriptPlaceholders
*
* @param rawScript rawScript
* @return String
* @throws StringIndexOutOfBoundsException StringIndexOutOfBoundsException
*/
private static String convertPythonScriptPlaceholders(String rawScript) throws StringIndexOutOfBoundsException {
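        // rewrites every ${setShareVar(${prop},value)} placeholder into a python statement like
        //   print("${{setValue({},{})}}".format("prop",value))
        // so that, at runtime, the script prints a ${setValue(prop,...)} marker which the worker
        // scans from stdout to fill the task's output variables (var pool)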
int len = "${setShareVar(${".length();
int scriptStart = 0;
while ((scriptStart = rawScript.indexOf("${setShareVar(${", scriptStart)) != -1) {
int start = -1;
int end = rawScript.indexOf('}', scriptStart + len);
String prop = rawScript.substring(scriptStart + len, end);
start = rawScript.indexOf(',', end);
end = rawScript.indexOf(')', start);
String value = rawScript.substring(start + 1, end);
start = rawScript.indexOf('}', start) + 1;
end = rawScript.length();
String replaceScript = String.format("print(\"${{setValue({},{})}}\".format(\"%s\",%s))", prop, value);
rawScript = rawScript.substring(0, scriptStart) + replaceScript + rawScript.substring(start, end);
scriptStart += replaceScript.length();
}
return rawScript;
}
/**
* create python command file if not exists
*
* @param pythonScript exec python script
* @param pythonScriptFile python script file
* @throws IOException io exception
*/
protected void createPythonCommandFileIfNotExists(String pythonScript, String pythonScriptFile) throws IOException {
logger.info("tenantCode :{}, task dir:{}", taskRequest.getTenantCode(), taskRequest.getExecutePath());
if (!Files.exists(Paths.get(pythonScriptFile))) {
logger.info("generate python script file:{}", pythonScriptFile);
StringBuilder sb = new StringBuilder();
sb.append("#-*- encoding=utf8 -*-\n");
sb.append("\n\n");
sb.append(pythonScript);
logger.info(sb.toString());
// write data to file
FileUtils.writeStringToFile(new File(pythonScriptFile),
sb.toString(),
StandardCharsets.UTF_8);
}
}
/**
* build python command file path
*
* @return python command file path
*/
protected String buildPythonCommandFilePath() {
return String.format("%s/py_%s.py", taskRequest.getExecutePath(), taskRequest.getTaskAppId());
}
/**
* build python script content
*
* @return raw python script
* @throws Exception exception
*/
private String buildPythonScriptContent() throws Exception {
String rawPythonScript = pythonParameters.getRawScript().replaceAll("\\r\\n", "\n");
// replace placeholder
Map<String, Property> paramsMap = ParamUtils.convert(taskRequest, pythonParameters);
if (MapUtils.isEmpty(paramsMap)) {
paramsMap = new HashMap<>();
}
if (MapUtils.isNotEmpty(taskRequest.getParamsMap())) {
paramsMap.putAll(taskRequest.getParamsMap());
}
rawPythonScript = ParameterUtils.convertParameterPlaceholders(rawPythonScript, ParamUtils.convert(paramsMap));
logger.info("raw python script : {}", pythonParameters.getRawScript());
return rawPythonScript;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,182 | [Bug] [plugin] python task execute error | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
PYTHON_HOME configuration is as follows:
export PYTHON_HOME=/usr/bin/python3
When the task runs, it fails with an error saying the relevant python module cannot be found:
![image](https://user-images.githubusercontent.com/18095523/150887223-b3dcd8cd-9b6e-414d-8cc0-0729d43edf34.png)
Scripts run correctly on the server.
![image](https://user-images.githubusercontent.com/18095523/150887285-22026771-966a-47bc-9556-95f4aa27ed56.png)
If I change "python" to "python3", the python task will run successfully.
![image](https://user-images.githubusercontent.com/18095523/150887368-811ff7d9-6acb-4d19-889d-94fa2f854980.png)
or
Delete the python soft link on the server, create a new soft link and point to python3, and the task can also run successfully.
![image](https://user-images.githubusercontent.com/18095523/150887617-5da1cc0a-fbdf-43ac-858f-f71350bb4b53.png)
![image](https://user-images.githubusercontent.com/18095523/150887651-0683d1f6-8c96-49df-a326-71da67cbc05a.png)
### What you expected to happen
Python tasks can be executed normally.
### How to reproduce
See "What happened"
### Anything else
_No response_
### Version
2.0.3
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8182 | https://github.com/apache/dolphinscheduler/pull/8196 | fb9f4a1339b31fdfcf43828cbe024ac11d8392d1 | a1cc2fd11ac97b2bace6977f946c0bc0480cc620 | "2022-01-25T00:28:28Z" | java | "2022-01-26T03:09:57Z" | dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/test/java/org/apache/dolphinscheduler/plugin/task/python/PythonTaskTest.java | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,204 | [Bug] [Dev] standalone-server start error | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![](https://vip1.loli.io/2022/01/26/yHTEOKlQFXMu6Gz.png)
### What you expected to happen
start successfully.
### How to reproduce
./bin/dolphinscheduler-daemon.sh start standalone-server
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8204 | https://github.com/apache/dolphinscheduler/pull/8205 | 7945a2f3aad15649e671dfa2e80586b14f3472e4 | b05ba4d1b9e6e66fb42f9cd5f7212e919c07725c | "2022-01-26T08:17:34Z" | java | "2022-01-26T09:51:20Z" | dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/HostUpdateProcessor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.processor;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.command.HostUpdateCommand;
import org.apache.dolphinscheduler.remote.processor.NettyRemoteChannel;
import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.google.common.base.Preconditions;
import io.netty.channel.Channel;
/**
* update process host
* this used when master failover
*/
@Component
public class HostUpdateProcessor implements NettyRequestProcessor {
private final Logger logger = LoggerFactory.getLogger(HostUpdateProcessor.class);
/**
* task callback service
*/
private final TaskCallbackService taskCallbackService;
public HostUpdateProcessor() {
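        // note: as a @Component this class is constructed during Spring context startup, so the
        // manual getBean lookup below can run before SpringApplicationContext has its context set
        // and fail the boot; letting Spring inject TaskCallbackService (e.g. an autowired field)
        // would avoid the eager lookup — an assumption about the failure above, not necessarily
        // the merged fix.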
this.taskCallbackService = SpringApplicationContext.getBean(TaskCallbackService.class);
}
@Override
public void process(Channel channel, Command command) {
Preconditions.checkArgument(CommandType.PROCESS_HOST_UPDATE_REQUEST == command.getType(), String.format("invalid command type : %s", command.getType()));
HostUpdateCommand updateCommand = JSONUtils.parseObject(command.getBody(), HostUpdateCommand.class);
logger.info("received host update command : {}", updateCommand);
taskCallbackService.changeRemoteChannel(updateCommand.getTaskInstanceId(), new NettyRemoteChannel(channel, command.getOpaque()));
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,169 | [Bug] [UI] After you save the workflow, it jumps to the workflow list page | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
Developers generally save after writing each task, and each save jumps to the workflow list page. Could it stay on the workflow editing page like version 1.3.X, so that the user only leaves the current page via the close button after all tasks are written?
Like this:
![image](https://user-images.githubusercontent.com/18095523/150745585-a7faf0f5-142f-49df-b090-7d7f4e9b00cd.png)
### What you expected to happen
Do not leave the workflow edit page after saving.
### How to reproduce
See "What happened"
### Anything else
_No response_
### Version
2.0.3
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8169 | https://github.com/apache/dolphinscheduler/pull/8198 | 3c10bbe1571573257c855667d99590c6aa87b423 | baf0c95add2772f4ea6f08788f216b5ef83557ed | "2022-01-24T08:15:10Z" | java | "2022-01-28T10:42:36Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div :class="['dag-chart', fullScreen ? 'full-screen' : '']">
<dag-toolbar />
<dag-canvas ref="canvas" />
<el-drawer
:visible.sync="taskDrawer"
size=""
:with-header="false"
:wrapperClosable="false"
class="task-drawer"
>
<!-- fix the bug that Element-ui(2.13.2) auto focus on the first input -->
<div style="width: 0px; height: 0px; overflow: hidden">
<el-input type="text" />
</div>
<m-form-model
v-if="taskDrawer"
:nodeData="nodeData"
:project-code="projectCode"
@seeHistory="seeHistory"
@addTaskInfo="addTaskInfo"
@close="closeTaskDrawer"
@onSubProcess="toSubProcess"
:type="type"
></m-form-model>
</el-drawer>
<el-dialog
:title="$t('Set the DAG diagram name')"
:visible.sync="saveDialog"
width="auto"
>
<m-udp ref="mUdp" @onUdp="onSave" @close="cancelSave"></m-udp>
</el-dialog>
<el-dialog
:title="$t('Please set the parameters before starting')"
:visible.sync="startDialog"
width="auto"
>
<m-start
:startData="{ code: definitionCode, name: name }"
:startNodeList="startTaskName"
:sourceType="'contextmenu'"
@onUpdateStart="onUpdateStart"
@closeStart="closeStart"
></m-start>
</el-dialog>
<edge-edit-model ref="edgeEditModel" />
<el-drawer :visible.sync="versionDrawer" size="" :with-header="false">
<!-- fix the bug that Element-ui(2.13.2) auto focus on the first input -->
<div style="width: 0px; height: 0px; overflow: hidden">
<el-input type="text" />
</div>
<m-versions
:versionData="versionData"
:isInstance="type === 'instance'"
@mVersionSwitchProcessDefinitionVersion="switchProcessVersion"
@mVersionGetProcessDefinitionVersionsPage="getProcessVersions"
@mVersionDeleteProcessDefinitionVersion="deleteProcessVersion"
@closeVersion="closeVersion"
></m-versions>
</el-drawer>
<m-log
v-if="type === 'instance' && logDialog"
:item="logTaskInstance"
source='dag'
:task-instance-id="logTaskInstance.id"
@close="closeLogDialog"
></m-log>
</div>
</template>
<script>
import { debounce } from 'lodash'
import dagToolbar from './canvas/toolbar.vue'
import dagCanvas from './canvas/canvas.vue'
import mFormModel from '../_source/formModel/formModel.vue'
import { mapActions, mapState, mapMutations } from 'vuex'
import mUdp from '../_source/udp/udp.vue'
import mStart from '../../projects/pages/definition/pages/list/_source/start.vue'
import edgeEditModel from './canvas/edgeEditModel.vue'
import mVersions from '../../projects/pages/definition/pages/list/_source/versions.vue'
import mLog from './formModel/log.vue'
const DEFAULT_NODE_DATA = {
id: null,
taskType: '',
self: {},
instanceId: null
}
export default {
name: 'dag-chart',
components: {
dagCanvas,
dagToolbar,
mFormModel,
mUdp,
mStart,
edgeEditModel,
mVersions,
mLog
},
provide () {
return {
dagChart: this
}
},
inject: ['definitionDetails'],
props: {
type: String,
releaseState: String
},
data () {
return {
definitionCode: 0,
// full screen mode
fullScreen: false,
// whether the task config drawer is visible
taskDrawer: false,
nodeData: { ...DEFAULT_NODE_DATA },
// whether the save dialog is visible
saveDialog: false,
// whether the start dialog is visible
startDialog: false,
startTaskName: '',
// whether the version drawer is visible
versionDrawer: false,
versionData: {
processDefinition: {
id: null,
version: '',
releaseState: ''
},
processDefinitionVersions: [],
total: null,
pageNo: null,
pageSize: null
},
// the task status refresh timer
statusTimer: null,
// the process instance id
instanceId: -1,
// log dialog
logDialog: false,
logTaskInstance: null,
taskInstances: []
}
},
mounted () {
this.setIsEditDag(false)
if (this.type === 'instance') {
this.instanceId = this.$route.params.id
this.definitionCode = this.$route.query.code || this.code
} else if (this.type === 'definition') {
this.definitionCode = this.$route.params.code
}
// auto resize canvas
this.resizeDebounceFunc = debounce(this.canvasResize, 200)
window.addEventListener('resize', this.resizeDebounceFunc)
// init graph
this.$refs.canvas.graphInit(!this.isDetails)
// backfill graph with tasks, locations and connects
this.backfill()
// refresh task status
if (this.type === 'instance') {
this.refreshTaskStatus()
// status polling
this.statusTimer = setInterval(() => {
this.refreshTaskStatus()
}, 90000)
}
},
beforeDestroy () {
this.resetParams()
clearInterval(this.statusTimer)
window.removeEventListener('resize', this.resizeDebounceFunc)
},
computed: {
...mapState('dag', [
'tasks',
'locations',
'connects',
'name',
'isDetails',
'projectCode',
'version',
'code'
])
},
methods: {
...mapActions('dag', [
'saveDAGchart',
'updateInstance',
'updateDefinition',
'getTaskState',
'getStartCheck',
'genTaskCodeList',
'switchProcessDefinitionVersion',
'getProcessDefinitionVersionsPage',
'deleteProcessDefinitionVersion'
]),
...mapMutations('dag', [
'addTask',
'setConnects',
'resetParams',
'setIsEditDag',
'setName',
'setLocations',
'resetLocalParam',
'setDependResult'
]),
/**
* Resize the canvas paper to fit its container
*/
canvasResize () {
const canvas = this.$refs.canvas
canvas && canvas.paperResize()
},
/**
* Toggle full screen
*/
toggleFullScreen () {
this.fullScreen = !this.fullScreen
this.$nextTick(this.canvasResize)
},
/**
* Task Drawer
* @param {boolean} visible
*/
toggleTaskDrawer (visible) {
this.taskDrawer = visible
},
/**
* Set the current node data
*/
setNodeData (nodeData) {
// copy into a fresh object so the shared DEFAULT_NODE_DATA is not mutated
this.nodeData = Object.assign({}, DEFAULT_NODE_DATA, nodeData)
},
/**
* open form model
* @desc Edit task config
* @param {number} taskCode
* @param {string} taskType
*/
openFormModel (taskCode, taskType) {
this.setNodeData({
id: taskCode,
taskType: taskType
})
this.toggleTaskDrawer(true)
},
addTaskInfo ({ item }) {
this.addTask(item)
this.$refs.canvas.setNodeName(item.code, item.name)
this.taskDrawer = false
},
closeTaskDrawer ({ flag }) {
if (flag) {
const canvas = this.$refs.canvas
canvas.removeNode(this.nodeData.id)
}
this.taskDrawer = false
},
/**
* Save dialog
*/
toggleSaveDialog (value) {
this.saveDialog = value
if (value) {
this.$nextTick(() => {
this.$refs.mUdp.reloadParam()
})
}
},
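/**
* Collect nodes and edges from the canvas, sync connects/locations to the store,
* then create or update the definition (or instance) accordingly
*/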
onSave (sourceType) {
this.toggleSaveDialog(false)
return new Promise((resolve, reject) => {
let tasks = this.tasks || []
const edges = this.$refs.canvas.getEdges()
const nodes = this.$refs.canvas.getNodes()
if (!nodes.length) {
reject(this.$t('Failed to create node to save'))
return
}
const connects = this.buildConnects(edges, tasks)
this.setConnects(connects)
const locations = nodes.map((node) => {
return {
taskCode: node.id,
x: node.position.x,
y: node.position.y
}
})
this.setLocations(locations)
resolve({
connects: connects,
tasks: tasks,
locations: locations
})
})
.then((res) => {
if (this.verifyConditions(res.tasks)) {
this.loading(true)
const isEdit = !!this.definitionCode
if (isEdit) {
const methodName = this.type === 'instance' ? 'updateInstance' : 'updateDefinition'
const methodParam = this.type === 'instance' ? this.instanceId : this.definitionCode
// Edit
return this[methodName](methodParam)
.then((res) => {
this.$message({
message: res.msg,
type: 'success',
offset: 80
})
if (this.type === 'instance') {
this.$router.push({
path: `/projects/${this.projectCode}/instance/list`
})
} else {
this.$router.push({
path: `/projects/${this.projectCode}/definition/list`
})
}
})
.catch((e) => {
this.$message.error(e.msg || '')
})
.finally((e) => {
this.loading(false)
})
} else {
// Create
return this.saveDAGchart()
.then((res) => {
this.$message.success(res.msg)
// source @/conf/home/pages/dag/_source/editAffirmModel/index.js
if (sourceType !== 'affirm') {
// Jump process definition
this.$router.push({ name: 'projects-definition-list' })
}
})
.catch((e) => {
this.setName('')
this.$message.error(e.msg || '')
})
.finally((e) => {
this.loading(false)
})
}
}
})
.catch((err) => {
let msg = typeof err === 'string' ? err : err.msg || ''
this.$message.error(msg)
})
},
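// Every CONDITIONS node must have both a success branch and a failure branch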
verifyConditions (value) {
let tasks = value
let bool = true
tasks.forEach((v) => {
if (
v.taskType === 'CONDITIONS' &&
(v.taskParams.conditionResult.successNode[0] === '' ||
v.taskParams.conditionResult.successNode[0] === null ||
v.taskParams.conditionResult.failedNode[0] === '' ||
v.taskParams.conditionResult.failedNode[0] === null)
) {
bool = false
}
})
if (!bool) {
this.$message.warning(
`${this.$t(
'Successful branch flow and failed branch flow are required'
)}`
)
return false
}
return true
},
cancelSave () {
this.toggleSaveDialog(false)
},
/**
* build graph json
*/
buildGraphJSON (tasks, locations, connects) {
const nodes = []
const edges = []
if (!locations) { locations = [] }
tasks.forEach((task) => {
const location = locations.find((l) => l.taskCode === task.code) || {}
const node = this.$refs.canvas.genNodeJSON(
task.code,
task.taskType,
task.name,
{
x: location.x,
y: location.y
}
)
nodes.push(node)
})
connects
.filter((r) => !!r.preTaskCode)
.forEach((c) => {
const edge = this.$refs.canvas.genEdgeJSON(
c.preTaskCode,
c.postTaskCode,
c.name
)
edges.push(edge)
})
return {
nodes,
edges
}
},
/**
* Build connects by edges and tasks
* @param {Edge[]} edges
* @param {Task[]} tasks
* @returns
*/
buildConnects (edges, tasks) {
const preTaskMap = {}
const tasksMap = {}
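// index edges by their target so tasks without a predecessor can be detected below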
edges.forEach((edge) => {
preTaskMap[edge.targetId] = {
sourceId: edge.sourceId,
edgeLabel: edge.label || ''
}
})
tasks.forEach((task) => {
tasksMap[task.code] = task
})
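// tasks with no inbound edge become head edges hanging off the virtual root (preTaskCode 0)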
const headEdges = tasks
.filter((task) => !preTaskMap[task.code])
.map((task) => {
return {
name: '',
preTaskCode: 0,
preTaskVersion: 0,
postTaskCode: task.code,
postTaskVersion: task.version || 0,
// conditionType and conditionParams are reserved
conditionType: 0,
conditionParams: {}
}
})
return edges
.map((edge) => {
return {
name: edge.label,
preTaskCode: edge.sourceId,
preTaskVersion: tasksMap[edge.sourceId].version || 0,
postTaskCode: edge.targetId,
postTaskVersion: tasksMap[edge.targetId].version || 0,
// conditionType and conditionParams are reserved
conditionType: 0,
conditionParams: {}
}
})
.concat(headEdges)
},
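/**
* Restore tasks, locations and connects from the store onto the canvas
*/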
backfill () {
const tasks = this.tasks
const locations = this.locations
const connects = this.connects
const json = this.buildGraphJSON(tasks, locations, connects)
this.$refs.canvas.fromJSON(json)
// Auto format
if (!locations) {
this.$refs.canvas.format()
}
},
/**
* Return to the previous process
*/
returnToPrevProcess () {
let $name = this.$route.name.split('-')
let subs = this.$route.query.subs
let ids = subs.split(',')
const last = ids.pop()
this.$router.push({
path: `/${$name[0]}/${this.projectCode}/${$name[1]}/list/${last}`,
query: ids.length > 0 ? { subs: ids.join(',') } : null
})
},
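// Navigate into a sub-process/sub-instance, recording the current id in the "subs"
// query param so returnToPrevProcess can walk back up the chain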
toSubProcess ({ subProcessCode, subInstanceId }) {
const tarIdentifier =
this.type === 'instance' ? subInstanceId : subProcessCode
const curIdentifier =
this.type === 'instance' ? this.instanceId : this.definitionCode
let olds = this.$route.query.subs
let subs = olds ? olds.split(',') : []
subs.push(curIdentifier)
let $name = this.$route.name.split('-')
this.$router.push({
path: `/${$name[0]}/${this.projectCode}/${$name[1]}/list/${tarIdentifier}`,
query: { subs: subs.join(',') }
})
},
seeHistory (taskName) {
this.$router.push({
name: 'task-instance',
query: {
processInstanceId: this.instanceId,
taskName: taskName
}
})
},
/**
* Start dialog
*/
startRunning (taskName) {
this.startTaskName = taskName
this.getStartCheck({ processDefinitionCode: this.definitionCode }).then(
(res) => {
this.startDialog = true
}
)
},
onUpdateStart () {
this.startDialog = false
},
closeStart () {
this.startDialog = false
},
/**
* Task status
*/
refreshTaskStatus () {
const instanceId = this.$route.params.id
this.loading(true)
this.getTaskState(instanceId)
.then((res) => {
this.$message(this.$t('Refresh status succeeded'))
const { taskList } = res.data
const list = res.list
if (taskList) {
this.taskInstances = taskList
taskList.forEach((taskInstance) => {
this.$refs.canvas.setNodeStatus({
code: taskInstance.taskCode,
state: taskInstance.state,
taskInstance
})
})
}
if (list) {
list.forEach((dependent) => {
if (dependent.dependentResult) {
this.setDependResult(JSON.parse(dependent.dependentResult))
}
})
}
})
.finally(() => {
this.loading(false)
})
},
/**
* Loading
* @param {boolean} visible
*/
loading (visible) {
if (visible) {
this.spinner = this.$loading({
lock: true,
text: this.$t('Loading...'),
spinner: 'el-icon-loading',
background: 'rgba(0, 0, 0, 0.4)',
customClass: 'dag-fullscreen-loading'
})
} else {
this.spinner && this.spinner.close()
}
},
/**
* change process definition version
*/
showVersions () {
this.getProcessDefinitionVersionsPage({
pageNo: 1,
pageSize: 10,
code: this.definitionCode
})
.then((res) => {
let processDefinitionVersions = res.data.totalList
let total = res.data.total
let pageSize = res.data.pageSize
let pageNo = res.data.currentPage
// this.versionData.processDefinition.id = this.urlParam.id
this.versionData.processDefinition.code = this.definitionCode
this.versionData.processDefinition.version = this.version
this.versionData.processDefinition.releaseState = this.releaseState
this.versionData.processDefinitionVersions =
processDefinitionVersions
this.versionData.total = total
this.versionData.pageNo = pageNo
this.versionData.pageSize = pageSize
this.versionDrawer = true
})
.catch((e) => {
this.$message.error(e.msg || '')
})
},
closeVersion () {
this.versionDrawer = false
},
switchProcessVersion ({ version, processDefinitionCode }) {
this.switchProcessDefinitionVersion({
version: version,
code: processDefinitionCode
})
.then((res) => {
this.$message.success(this.$t('Switch Version Successfully'))
this.closeVersion()
this.definitionDetails.init()
})
.catch((e) => {
this.$message.error(e.msg || '')
})
},
getProcessVersions ({ pageNo, pageSize, processDefinitionCode }) {
this.getProcessDefinitionVersionsPage({
pageNo: pageNo,
pageSize: pageSize,
code: processDefinitionCode
})
.then((res) => {
this.versionData.processDefinitionVersions = res.data.totalList
this.versionData.total = res.data.total
this.versionData.pageSize = res.data.pageSize
this.versionData.pageNo = res.data.currentPage
})
.catch((e) => {
this.$message.error(e.msg || '')
})
},
deleteProcessVersion ({ version, processDefinitionCode }) {
this.deleteProcessDefinitionVersion({
version: version,
code: processDefinitionCode
})
.then((res) => {
this.$message.success(res.msg || '')
this.getProcessVersions({
pageNo: 1,
pageSize: 10,
processDefinitionCode: processDefinitionCode
})
})
.catch((e) => {
this.$message.error(e.msg || '')
})
},
/**
* Log dialog
*/
closeLogDialog () {
this.logDialog = false
this.logTaskInstance = null
},
showLogDialog (taskDefinitionCode) {
const taskInstance = this.taskInstances.find(taskInstance => {
return taskInstance.taskCode === taskDefinitionCode
})
if (taskInstance) {
this.logTaskInstance = {
id: taskInstance.id,
type: taskInstance.taskType
}
this.logDialog = true
}
}
}
}
</script>
<style lang="scss" scoped>
@import "./dag";
</style>
<style lang="scss">
@import "./loading";
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,186 | [Bug] [UI] the page of task group queue is empty | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![图片](https://user-images.githubusercontent.com/26736756/150906562-9c71baf9-70ad-4d2f-9ea7-42d5e1c8e246.png)
When we change the language to English, this page shows no data.
### What you expected to happen
show the data
### How to reproduce
Change the language to English.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8186 | https://github.com/apache/dolphinscheduler/pull/8272 | 6be220129fe214eeb56b9e774ef42c65b78bffa8 | 83d08e013af78288a59be33f10397918b7b17d3d | "2022-01-25T03:40:08Z" | java | "2022-01-31T09:39:18Z" | dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/taskGroups/taskGroupQueue/index.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<m-list-construction :title="$t('Task group queue')">
<template slot="conditions">
<m-conditions @on-conditions="_onConditions">
<template slot="search-group">
<div class="list">
<el-button size="mini" @click="_ckQuery" icon="el-icon-search"></el-button>
</div>
<div class="list">
<el-input v-model="instanceName" style="width: 140px;" size="mini" :placeholder="$t('Process Instance')" clearable></el-input>
</div>
<div class="list">
<el-input v-model="processName" style="width: 160px;" size="mini" :placeholder="$t('Process Name')" clearable></el-input>
</div>
<div class="list">
<el-select style="width: 140px;" v-model="groupId" :placeholder="$t('Task group name')" size="mini" clearable>
<el-option
v-for="taskGroup in taskGroupList"
:key="taskGroup.id"
:value="taskGroup.id"
:label="taskGroup.name">
</el-option>
</el-select>
</div>
</template>
</m-conditions>
</template>
<template slot="content">
<template v-if="taskGroupQueue.length || total>0">
<m-list @on-edit="_onEdit"
@on-force-start="_onForceStart"
@on-edit-priority="_onEditPriority"
:task-group-queue="taskGroupQueue"
:page-no="searchParams.pageNo"
:page-size="searchParams.pageSize">
</m-list>
<div class="page-box">
<el-pagination
background
@current-change="_page"
@size-change="_pageSize"
:page-size="searchParams.pageSize"
:current-page.sync="searchParams.pageNo"
:page-sizes="[10, 30, 50]"
layout="sizes, prev, pager, next, jumper"
:total="total">
</el-pagination>
</div>
</template>
<template v-if="!taskGroupList.length && total<=0">
<m-no-data></m-no-data>
</template>
<m-spin :is-spin="isLoading" :is-left="isLeft"></m-spin>
</template>
</m-list-construction>
</template>
<script>
import _ from 'lodash'
import { mapActions } from 'vuex'
import mList from './_source/list'
import store from '@/conf/home/store'
import mSpin from '@/module/components/spin/spin'
import mNoData from '@/module/components/noData/noData'
import listUrlParamHandle from '@/module/mixin/listUrlParamHandle'
import mConditions from '@/module/components/conditions/conditions'
import mListConstruction from '@/module/components/listConstruction/listConstruction'
export default {
name: 'task-group-queue-index',
data () {
return {
total: null,
isLoading: true,
modalType: 'create',
taskGroupList: [],
taskGroupQueue: [],
groupId: '',
instanceName: '',
processName: '',
searchParams: {
pageSize: 10,
pageNo: 1
},
isLeft: true,
isADMIN: store.state.user.userInfo.userType === 'ADMIN_USER',
item: {},
createTaskGroupDialog: false
}
},
mixins: [listUrlParamHandle],
props: {},
methods: {
...mapActions('resource', ['getTaskListInTaskGroupQueueById']),
...mapActions('resource', ['getTaskGroupListPaging']),
/**
* Query
*/
_onConditions (o) {
this.searchParams = _.assign(this.searchParams, o)
this.searchParams.pageNo = 1
},
_ckQuery () {
this.searchParams.groupId = this.groupId
this.searchParams.instanceName = this.instanceName
this.searchParams.processName = this.processName
this._getList(false)
},
_page (val) {
this.searchParams.pageNo = val
},
_pageSize (val) {
this.searchParams.pageSize = val
},
_onEdit (item) {
this.item = item
this.item.modalType = 'edit'
this.createTaskGroupDialog = true
},
_onForceStart (item) {
this._getList(false)
},
_onEditPriority () {
this._getList(false)
},
_create () {
this.item = { projectOptions: this.projectList, modalType: 'create' }
this.createTaskGroupDialog = true
},
onUpdate () {
this._debounceGET('false')
this.createTaskGroupDialog = false
},
close () {
this.createTaskGroupDialog = false
},
_getList (flag) {
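// load every task group in one request (pageSize = Integer.MAX_VALUE) so queue rows can be joined to group names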
const taskGroupSearchParams = {
pageNo: 1,
pageSize: 2147483647
}
// sessionStorage stores strings, so compare against '0'
if (sessionStorage.getItem('isLeft') === '0') {
this.isLeft = false
} else {
this.isLeft = true
}
this.isLoading = !flag
this.getTaskGroupListPaging(taskGroupSearchParams).then((values) => {
this.taskGroupList = []
this.taskGroupList = values.totalList
if (this.taskGroupList) {
if (this.searchParams.id) {
this.groupId = _.parseInt(this.searchParams.id)
this.searchParams.groupId = _.parseInt(this.searchParams.id)
}
this.getTaskListInTaskGroupQueueById(this.searchParams).then((res) => {
// step back a page when the requested queue page comes back empty
if (this.searchParams.pageNo > 1 && res.data.totalList.length === 0) {
this.searchParams.pageNo = this.searchParams.pageNo - 1
} else {
this.taskGroupQueue = []
if (res.data.totalList) {
this.taskGroupQueue = res.data.totalList
}
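// join each queue row to its task group so the group name can be displayed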
this.taskGroupQueue.forEach(item => {
const taskGroup = _.find(this.taskGroupList, { id: item.groupId })
if (taskGroup) {
item.taskGroupName = taskGroup.name
}
})
this.total = res.data.total
this.isLoading = false
}
}).catch(e => {
this.isLoading = false
})
}
}).catch(e => {
this.isLoading = false
})
}
},
watch: {
// router
'$route' (a) {
// url no params get instance list
this.searchParams.pageNo = _.isEmpty(a.query) ? 1 : a.query.pageNo
}
},
beforeDestroy () {
sessionStorage.setItem('isLeft', 1)
},
components: { mList, mListConstruction, mConditions, mSpin, mNoData }
}
</script>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,186 | [Bug] [UI] the page of task group queue is empty | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![图片](https://user-images.githubusercontent.com/26736756/150906562-9c71baf9-70ad-4d2f-9ea7-42d5e1c8e246.png)
When we change the language to English, this page shows no data.
### What you expected to happen
show the data
### How to reproduce
Change the language to English.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8186 | https://github.com/apache/dolphinscheduler/pull/8272 | 6be220129fe214eeb56b9e774ef42c65b78bffa8 | 83d08e013af78288a59be33f10397918b7b17d3d | "2022-01-25T03:40:08Z" | java | "2022-01-31T09:39:18Z" | dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export default {
'User Name': 'User Name',
'Please enter user name': 'Please enter user name',
Password: 'Password',
'Please enter your password': 'Please enter your password',
'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22',
Login: 'Login',
Home: 'Home',
'Failed to create node to save': 'Failed to create node to save',
'Global parameters': 'Global parameters',
'Local parameters': 'Local parameters',
'Copy success': 'Copy success',
'The browser does not support automatic copying': 'The browser does not support automatic copying',
'Whether to save the DAG graph': 'Whether to save the DAG graph',
'Current node settings': 'Current node settings',
'View history': 'View history',
'View log': 'View log',
'Force success': 'Force success',
'Enter this child node': 'Enter this child node',
'Node name': 'Node name',
'Please enter name (required)': 'Please enter name (required)',
'Run flag': 'Run flag',
Normal: 'Normal',
'Prohibition execution': 'Prohibition execution',
'Please enter description': 'Please enter description',
'Number of failed retries': 'Number of failed retries',
Times: 'Times',
'Failed retry interval': 'Failed retry interval',
Minute: 'Minute',
'Delay execution time': 'Delay execution time',
'Delay execution': 'Delay execution',
'Forced success': 'Forced success',
Cancel: 'Cancel',
'Confirm add': 'Confirm add',
'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process',
'The task has not been executed and cannot enter the sub-Process': 'The task has not been executed and cannot enter the sub-Process',
'Name already exists': 'Name already exists',
'Download Log': 'Download Log',
'Refresh Log': 'Refresh Log',
'Enter full screen': 'Enter full screen',
'Cancel full screen': 'Cancel full screen',
Close: 'Close',
'Update log success': 'Update log success',
'No more logs': 'No more logs',
'No log': 'No log',
'Loading Log...': 'Loading Log...',
'Set the DAG diagram name': 'Set the DAG diagram name',
'Please enter description(optional)': 'Please enter description(optional)',
'Set global': 'Set global',
'Whether to go online the process definition': 'Whether to go online the process definition',
'Whether to update the process definition': 'Whether to update the process definition',
Add: 'Add',
'DAG graph name cannot be empty': 'DAG graph name cannot be empty',
'Create Datasource': 'Create Datasource',
'Project Home': 'Workflow Monitor',
'Project Manage': 'Project',
'Create Project': 'Create Project',
'Cron Manage': 'Cron Manage',
'Copy Workflow': 'Copy Workflow',
'Tenant Manage': 'Tenant Manage',
'Create Tenant': 'Create Tenant',
'User Manage': 'User Manage',
'Create User': 'Create User',
'User Information': 'User Information',
'Edit Password': 'Edit Password',
Success: 'Success',
Failed: 'Failed',
Delete: 'Delete',
'Please choose': 'Please choose',
'Please enter a positive integer': 'Please enter a positive integer',
'Program Type': 'Program Type',
'Main Class': 'Main Class',
'Main Package': 'Main Package',
'Please enter main package': 'Please enter main package',
'Please enter main class': 'Please enter main class',
'Main Arguments': 'Main Arguments',
'Please enter main arguments': 'Please enter main arguments',
'Option Parameters': 'Option Parameters',
'Please enter option parameters': 'Please enter option parameters',
Resources: 'Resources',
'Custom Parameters': 'Custom Parameters',
'Custom template': 'Custom template',
Datasource: 'Datasource',
methods: 'methods',
'Please enter the procedure method': 'Please enter the procedure script \n\ncall procedure:{call <procedure-name>[(<arg1>,<arg2>, ...)]}\n\ncall function:{?= call <procedure-name>[(<arg1>,<arg2>, ...)]} ',
'The procedure method script example': 'example:{call <procedure-name>[(?,?, ...)]} or {?= call <procedure-name>[(?,?, ...)]}',
Script: 'Script',
'Please enter script(required)': 'Please enter script(required)',
'Deploy Mode': 'Deploy Mode',
'Driver Cores': 'Driver Cores',
'Please enter Driver cores': 'Please enter Driver cores',
'Driver Memory': 'Driver Memory',
'Please enter Driver memory': 'Please enter Driver memory',
'Executor Number': 'Executor Number',
'Please enter Executor number': 'Please enter Executor number',
'The Executor number should be a positive integer': 'The Executor number should be a positive integer',
'Executor Memory': 'Executor Memory',
'Please enter Executor memory': 'Please enter Executor memory',
'Executor Cores': 'Executor Cores',
'Please enter Executor cores': 'Please enter Executor cores',
'Memory should be a positive integer': 'Memory should be a positive integer',
'Core number should be positive integer': 'Core number should be positive integer',
'Flink Version': 'Flink Version',
'JobManager Memory': 'JobManager Memory',
'Please enter JobManager memory': 'Please enter JobManager memory',
'TaskManager Memory': 'TaskManager Memory',
'Please enter TaskManager memory': 'Please enter TaskManager memory',
'Slot Number': 'Slot Number',
'Please enter Slot number': 'Please enter Slot number',
Parallelism: 'Parallelism',
'Custom Parallelism': 'Configure parallelism',
'Please enter Parallelism': 'Please enter Parallelism',
'Parallelism tip': 'If there are a large number of tasks requiring complement, you can use the custom parallelism to ' +
'set the complement task thread to a reasonable value to avoid too large impact on the server.',
'Parallelism number should be positive integer': 'Parallelism number should be positive integer',
'TaskManager Number': 'TaskManager Number',
'Please enter TaskManager number': 'Please enter TaskManager number',
'App Name': 'App Name',
'Please enter app name(optional)': 'Please enter app name(optional)',
'SQL Type': 'SQL Type',
'Send Email': 'Send Email',
'Log display': 'Log display',
'rows of result': 'rows of result',
Title: 'Title',
'Please enter the title of email': 'Please enter the title of email',
Table: 'Table',
TableMode: 'Table',
Attachment: 'Attachment',
'SQL Parameter': 'SQL Parameter',
'SQL Statement': 'SQL Statement',
'UDF Function': 'UDF Function',
'Please enter a SQL Statement(required)': 'Please enter a SQL Statement(required)',
'Please enter a JSON Statement(required)': 'Please enter a JSON Statement(required)',
'One form or attachment must be selected': 'One form or attachment must be selected',
'Mail subject required': 'Mail subject required',
'Child Node': 'Child Node',
'Please select a sub-Process': 'Please select a sub-Process',
Edit: 'Edit',
'Switch To This Version': 'Switch To This Version',
'Datasource Name': 'Datasource Name',
'Please enter datasource name': 'Please enter datasource name',
IP: 'IP',
'Please enter IP': 'Please enter IP',
Port: 'Port',
'Please enter port': 'Please enter port',
'Database Name': 'Database Name',
'Please enter database name': 'Please enter database name',
'Oracle Connect Type': 'ServiceName or SID',
'Oracle Service Name': 'ServiceName',
'Oracle SID': 'SID',
'jdbc connect parameters': 'jdbc connect parameters',
'Test Connect': 'Test Connect',
'Please enter resource name': 'Please enter resource name',
'Please enter resource folder name': 'Please enter resource folder name',
'Please enter a non-query SQL statement': 'Please enter a non-query SQL statement',
'Please enter IP/hostname': 'Please enter IP/hostname',
'jdbc connection parameters is not a correct JSON format': 'jdbc connection parameters is not a correct JSON format',
'#': '#',
'Datasource Type': 'Datasource Type',
'Datasource Parameter': 'Datasource Parameter',
'Create Time': 'Create Time',
'Update Time': 'Update Time',
Operation: 'Operation',
'Current Version': 'Current Version',
'Click to view': 'Click to view',
'Delete?': 'Delete?',
'Switch Version Successfully': 'Switch Version Successfully',
'Confirm Switch To This Version?': 'Confirm Switch To This Version?',
Confirm: 'Confirm',
'Task status statistics': 'Task Status Statistics',
Number: 'Number',
State: 'State',
'Dry-run flag': 'Dry-run flag',
'Process Status Statistics': 'Process Status Statistics',
'Process Definition Statistics': 'Process Definition Statistics',
'Project Name': 'Project Name',
'Please enter name': 'Please enter name',
'Owned Users': 'Owned Users',
'Process Pid': 'Process Pid',
'Zk registration directory': 'Zk registration directory',
cpuUsage: 'cpuUsage',
memoryUsage: 'memoryUsage',
'Last heartbeat time': 'Last heartbeat time',
'Edit Tenant': 'Edit Tenant',
'OS Tenant Code': 'OS Tenant Code',
'Tenant Name': 'Tenant Name',
Queue: 'Yarn Queue',
'Please select a queue': 'default is tenant association queue',
'Please enter the os tenant code in English': 'Please enter the os tenant code in English',
'Please enter os tenant code in English': 'Please enter os tenant code in English',
'Please enter os tenant code': 'Please enter os tenant code',
'Please enter tenant Name': 'Please enter tenant Name',
'The os tenant code. Only letters or a combination of letters and numbers are allowed': 'The os tenant code. Only letters or a combination of letters and numbers are allowed',
'Edit User': 'Edit User',
Tenant: 'Tenant',
Email: 'Email',
Phone: 'Phone',
'User Type': 'User Type',
'Please enter phone number': 'Please enter phone number',
'Please enter email': 'Please enter email',
'Please enter the correct email format': 'Please enter the correct email format',
'Please enter the correct mobile phone format': 'Please enter the correct mobile phone format',
Project: 'Project',
Authorize: 'Authorize',
'File resources': 'File resources',
'UDF resources': 'UDF resources',
'UDF resources directory': 'UDF resources directory',
'Please select UDF resources directory': 'Please select UDF resources directory',
'Alarm group': 'Alarm group',
'Alarm group required': 'Alarm group required',
'Edit alarm group': 'Edit alarm group',
'Create alarm group': 'Create alarm group',
'Create Alarm Instance': 'Create Alarm Instance',
'Edit Alarm Instance': 'Edit Alarm Instance',
'Group Name': 'Group Name',
'Alarm instance name': 'Alarm instance name',
'Alarm plugin name': 'Alarm plugin name',
'Select plugin': 'Select plugin',
'Select Alarm plugin': 'Please select an Alarm plugin',
'Please enter group name': 'Please enter group name',
'Instance parameter exception': 'Instance parameter exception',
'Group Type': 'Group Type',
'Alarm plugin instance': 'Alarm plugin instance',
'Please enter alarm plugin instance name': 'Please enter alarm plugin instance name',
'Select Alarm plugin instance': 'Please select an Alarm plugin instance',
Remarks: 'Remarks',
SMS: 'SMS',
'Managing Users': 'Managing Users',
Permission: 'Permission',
Administrator: 'Administrator',
'Confirm Password': 'Confirm Password',
'Please enter confirm password': 'Please enter confirm password',
'Password cannot be in Chinese': 'Password cannot be in Chinese',
'Please enter a password (6-22) character password': 'Please enter a password (6-22) character password',
'Confirmation password cannot be in Chinese': 'Confirmation password cannot be in Chinese',
'Please enter a confirmation password (6-22) character password': 'Please enter a confirmation password (6-22) character password',
'The password is inconsistent with the confirmation password': 'The password is inconsistent with the confirmation password',
'Please select the datasource': 'Please select the datasource',
'Please select resources': 'Please select resources',
Query: 'Query',
'Non Query': 'Non Query',
'prop(required)': 'prop(required)',
'value(optional)': 'value(optional)',
'value(required)': 'value(required)',
'prop is empty': 'prop is empty',
'value is empty': 'value is empty',
'prop is repeat': 'prop is repeat',
'Start Time': 'Start Time',
'End Time': 'End Time',
crontab: 'crontab',
'Failure Strategy': 'Failure Strategy',
online: 'online',
offline: 'offline',
'Task Status': 'Task Status',
'Process Instance': 'Process Instance',
'Task Instance': 'Task Instance',
'Select date range': 'Select date range',
startDate: 'startDate',
endDate: 'endDate',
Date: 'Date',
Waiting: 'Waiting',
Execution: 'Execution',
Finish: 'Finish',
'Create File': 'Create File',
'Create folder': 'Create folder',
'File Name': 'File Name',
'Folder Name': 'Folder Name',
'File Format': 'File Format',
'Folder Format': 'Folder Format',
'File Content': 'File Content',
'Upload File Size': 'Upload File size cannot exceed 1g',
Create: 'Create',
'Please enter the resource content': 'Please enter the resource content',
'Resource content cannot exceed 3000 lines': 'Resource content cannot exceed 3000 lines',
'File Details': 'File Details',
'Download Details': 'Download Details',
Return: 'Return',
Save: 'Save',
'File Manage': 'File Manage',
'Upload Files': 'Upload Files',
'Create UDF Function': 'Create UDF Function',
'Upload UDF Resources': 'Upload UDF Resources',
'Service-Master': 'Service-Master',
'Service-Worker': 'Service-Worker',
'Process Name': 'Process Name',
Executor: 'Executor',
'Run Type': 'Run Type',
'Scheduling Time': 'Scheduling Time',
'Run Times': 'Run Times',
host: 'host',
'fault-tolerant sign': 'fault-tolerant sign',
Rerun: 'Rerun',
'Recovery Failed': 'Recovery Failed',
Stop: 'Stop',
Pause: 'Pause',
'Recovery Suspend': 'Recovery Suspend',
Gantt: 'Gantt',
'Node Type': 'Node Type',
'Submit Time': 'Submit Time',
Duration: 'Duration',
'Retry Count': 'Retry Count',
'Task Name': 'Task Name',
'Task Date': 'Task Date',
'Source Table': 'Source Table',
'Record Number': 'Record Number',
'Target Table': 'Target Table',
'Online viewing type is not supported': 'Online viewing type is not supported',
Size: 'Size',
Rename: 'Rename',
Download: 'Download',
Export: 'Export',
'Version Info': 'Version Info',
Submit: 'Submit',
'Edit UDF Function': 'Edit UDF Function',
type: 'type',
'UDF Function Name': 'UDF Function Name',
FILE: 'FILE',
UDF: 'UDF',
'File Subdirectory': 'File Subdirectory',
'Please enter a function name': 'Please enter a function name',
'Package Name': 'Package Name',
'Please enter a Package name': 'Please enter a Package name',
Parameter: 'Parameter',
'Please enter a parameter': 'Please enter a parameter',
'UDF Resources': 'UDF Resources',
'Upload Resources': 'Upload Resources',
Instructions: 'Instructions',
'Please enter a instructions': 'Please enter instructions',
'Please enter a UDF function name': 'Please enter a UDF function name',
'Select UDF Resources': 'Select UDF Resources',
'Class Name': 'Class Name',
'Jar Package': 'Jar Package',
'Library Name': 'Library Name',
'UDF Resource Name': 'UDF Resource Name',
'File Size': 'File Size',
Description: 'Description',
'Drag Nodes and Selected Items': 'Drag Nodes and Selected Items',
'Select Line Connection': 'Select Line Connection',
'Delete selected lines or nodes': 'Delete selected lines or nodes',
'Full Screen': 'Full Screen',
Unpublished: 'Unpublished',
'Start Process': 'Start Process',
'Execute from the current node': 'Execute from the current node',
'Recover tolerance fault process': 'Recover tolerance fault process',
'Resume the suspension process': 'Resume the suspension process',
'Execute from the failed nodes': 'Execute from the failed nodes',
'Complement Data': 'Complement Data',
'Scheduling execution': 'Scheduling execution',
'Recovery waiting thread': 'Recovery waiting thread',
'Submitted successfully': 'Submitted successfully',
Executing: 'Executing',
'Ready to pause': 'Ready to pause',
'Ready to stop': 'Ready to stop',
'Need fault tolerance': 'Need fault tolerance',
Kill: 'Kill',
'Waiting for thread': 'Waiting for thread',
'Waiting for dependence': 'Waiting for dependence',
Start: 'Start',
Copy: 'Copy',
'Copy name': 'Copy name',
'Copy path': 'Copy path',
'Please enter keyword': 'Please enter keyword',
'File Upload': 'File Upload',
'Drag the file into the current upload window': 'Drag the file into the current upload window',
'Drag area upload': 'Drag area upload',
Upload: 'Upload',
'ReUpload File': 'ReUpload File',
'Please enter file name': 'Please enter file name',
'Please select the file to upload': 'Please select the file to upload',
'Resources manage': 'Resources',
Security: 'Security',
Logout: 'Logout',
'No data': 'No data',
'Uploading...': 'Uploading...',
'Loading...': 'Loading...',
List: 'List',
'Unable to download without proper url': 'Unable to download without proper url',
Process: 'Process',
'Process definition': 'Process definition',
'Task record': 'Task record',
'Warning group manage': 'Warning group manage',
'Warning instance manage': 'Warning instance manage',
'Servers manage': 'Servers manage',
'UDF manage': 'UDF manage',
'Resource manage': 'Resource manage',
'Function manage': 'Function manage',
'Edit password': 'Edit password',
'Ordinary users': 'Ordinary users',
'Create process': 'Create process',
'Import process': 'Import process',
'Timing state': 'Timing state',
Timing: 'Timing',
Timezone: 'Timezone',
TreeView: 'TreeView',
'Mailbox already exists! Recipients and copyers cannot repeat': 'Mailbox already exists! Recipients and copyers cannot repeat',
'Mailbox input is illegal': 'Mailbox input is illegal',
'Please set the parameters before starting': 'Please set the parameters before starting',
Continue: 'Continue',
End: 'End',
'Node execution': 'Node execution',
'Backward execution': 'Backward execution',
'Forward execution': 'Forward execution',
'Execute only the current node': 'Execute only the current node',
'Notification strategy': 'Notification strategy',
'Notification group': 'Notification group',
'Please select a notification group': 'Please select a notification group',
'Whether it is a complement process?': 'Whether it is a complement process?',
'Schedule date': 'Schedule date',
'Mode of execution': 'Mode of execution',
'Serial execution': 'Serial execution',
'Parallel execution': 'Parallel execution',
'Set parameters before timing': 'Set parameters before timing',
'Start and stop time': 'Start and stop time',
'Please select time': 'Please select time',
'Please enter crontab': 'Please enter crontab',
none_1: 'none',
success_1: 'success',
failure_1: 'failure',
All_1: 'All',
Toolbar: 'Toolbar',
'View variables': 'View variables',
'Format DAG': 'Format DAG',
'Refresh DAG status': 'Refresh DAG status',
Return_1: 'Return',
'Please enter format': 'Please enter format',
'connection parameter': 'connection parameter',
'Process definition details': 'Process definition details',
'Create process definition': 'Create process definition',
'Scheduled task list': 'Scheduled task list',
'Process instance details': 'Process instance details',
'Create Resource': 'Create Resource',
'User Center': 'User Center',
AllStatus: 'All',
None: 'None',
Name: 'Name',
'Process priority': 'Process priority',
'Task priority': 'Task priority',
'Task timeout alarm': 'Task timeout alarm',
'Timeout strategy': 'Timeout strategy',
'Timeout alarm': 'Timeout alarm',
'Timeout failure': 'Timeout failure',
'Timeout period': 'Timeout period',
'Waiting Dependent complete': 'Waiting Dependent complete',
'Waiting Dependent start': 'Waiting Dependent start',
'Check interval': 'Check interval',
'Timeout must be longer than check interval': 'Timeout must be longer than check interval',
'Timeout strategy must be selected': 'Timeout strategy must be selected',
'Timeout must be a positive integer': 'Timeout must be a positive integer',
'Add dependency': 'Add dependency',
'Whether dry-run': 'Whether dry-run',
and: 'and',
or: 'or',
month: 'month',
week: 'week',
day: 'day',
hour: 'hour',
Running: 'Running',
'Waiting for dependency to complete': 'Waiting for dependency to complete',
Selected: 'Selected',
CurrentHour: 'CurrentHour',
Last1Hour: 'Last1Hour',
Last2Hours: 'Last2Hours',
Last3Hours: 'Last3Hours',
Last24Hours: 'Last24Hours',
today: 'today',
Last1Days: 'Last1Days',
Last2Days: 'Last2Days',
Last3Days: 'Last3Days',
Last7Days: 'Last7Days',
ThisWeek: 'ThisWeek',
LastWeek: 'LastWeek',
LastMonday: 'LastMonday',
LastTuesday: 'LastTuesday',
LastWednesday: 'LastWednesday',
LastThursday: 'LastThursday',
LastFriday: 'LastFriday',
LastSaturday: 'LastSaturday',
LastSunday: 'LastSunday',
ThisMonth: 'ThisMonth',
LastMonth: 'LastMonth',
LastMonthBegin: 'LastMonthBegin',
LastMonthEnd: 'LastMonthEnd',
'Refresh status succeeded': 'Refresh status succeeded',
'Queue manage': 'Yarn Queue manage',
'Create queue': 'Create queue',
'Edit queue': 'Edit queue',
'Datasource manage': 'Datasource',
'History task record': 'History task record',
'Please go online': 'Please go online',
'Queue value': 'Queue value',
'Please enter queue value': 'Please enter queue value',
'Worker group manage': 'Worker group manage',
'Create worker group': 'Create worker group',
'Edit worker group': 'Edit worker group',
'Token manage': 'Token manage',
'Create token': 'Create token',
'Edit token': 'Edit token',
Addresses: 'Addresses',
'Worker Addresses': 'Worker Addresses',
'Please select the worker addresses': 'Please select the worker addresses',
'Failure time': 'Failure time',
'Expiration time': 'Expiration time',
User: 'User',
'Please enter token': 'Please enter token',
'Generate token': 'Generate token',
Monitor: 'Monitor',
Group: 'Group',
'Queue statistics': 'Queue statistics',
'Command status statistics': 'Command status statistics',
'Task kill': 'Task Kill',
'Task queue': 'Task queue',
'Error command count': 'Error command count',
'Normal command count': 'Normal command count',
Manage: ' Manage',
'Number of connections': 'Number of connections',
Sent: 'Sent',
Received: 'Received',
'Min latency': 'Min latency',
'Avg latency': 'Avg latency',
'Max latency': 'Max latency',
'Node count': 'Node count',
'Query time': 'Query time',
'Node self-test status': 'Node self-test status',
'Health status': 'Health status',
'Max connections': 'Max connections',
'Threads connections': 'Threads connections',
'Max used connections': 'Max used connections',
'Threads running connections': 'Threads running connections',
'Worker group': 'Worker group',
'Please enter a positive integer greater than 0': 'Please enter a positive integer greater than 0',
'Pre Statement': 'Pre Statement',
'Post Statement': 'Post Statement',
'Statement cannot be empty': 'Statement cannot be empty',
'Process Define Count': 'Work flow Define Count',
'Process Instance Running Count': 'Process Instance Running Count',
'command number of waiting for running': 'command number of waiting for running',
'failure command number': 'failure command number',
'tasks number of waiting running': 'tasks number of waiting running',
'task number of ready to kill': 'task number of ready to kill',
'Statistics manage': 'Statistics Manage',
statistics: 'Statistics',
'select tenant': 'select tenant',
'Please enter Principal': 'Please enter Principal',
'Please enter the kerberos authentication parameter java.security.krb5.conf': 'Please enter the kerberos authentication parameter java.security.krb5.conf',
'Please enter the kerberos authentication parameter login.user.keytab.username': 'Please enter the kerberos authentication parameter login.user.keytab.username',
'Please enter the kerberos authentication parameter login.user.keytab.path': 'Please enter the kerberos authentication parameter login.user.keytab.path',
'The start time must not be the same as the end': 'The start time must not be the same as the end',
'Startup parameter': 'Startup parameter',
'Startup type': 'Startup type',
'warning of timeout': 'warning of timeout',
'Next five execution times': 'Next five execution times',
'Execute time': 'Execute time',
'Complement range': 'Complement range',
'Http Url': 'Http Url',
'Http Method': 'Http Method',
'Http Parameters': 'Http Parameters',
'Http Parameters Key': 'Http Parameters Key',
'Http Parameters Position': 'Http Parameters Position',
'Http Parameters Value': 'Http Parameters Value',
'Http Check Condition': 'Http Check Condition',
'Http Condition': 'Http Condition',
'Please Enter Http Url': 'Please Enter Http Url(required)',
'Please Enter Http Condition': 'Please Enter Http Condition',
'There is no data for this period of time': 'There is no data for this period of time',
'Worker addresses cannot be empty': 'Worker addresses cannot be empty',
'Please generate token': 'Please generate token',
'Please Select token': 'Please select the expiration time of token',
'Spark Version': 'Spark Version',
TargetDataBase: 'target database',
TargetTable: 'target table',
TargetJobName: 'target job name',
'Please enter Pigeon job name': 'Please enter Pigeon job name',
'Please enter the table of target': 'Please enter the table of target',
'Please enter a Target Table(required)': 'Please enter a Target Table(required)',
SpeedByte: 'speed(byte count)',
SpeedRecord: 'speed(record count)',
'0 means unlimited by byte': '0 means unlimited',
'0 means unlimited by count': '0 means unlimited',
'Modify User': 'Modify User',
'Whether directory': 'Whether directory',
Yes: 'Yes',
No: 'No',
'Hadoop Custom Params': 'Hadoop Params',
'Sqoop Advanced Parameters': 'Sqoop Params',
'Sqoop Job Name': 'Job Name',
'Please enter Mysql Database(required)': 'Please enter Mysql Database(required)',
'Please enter Mysql Table(required)': 'Please enter Mysql Table(required)',
'Please enter Columns (Comma separated)': 'Please enter Columns (Comma separated)',
'Please enter Target Dir(required)': 'Please enter Target Dir(required)',
'Please enter Export Dir(required)': 'Please enter Export Dir(required)',
'Please enter Hive Database(required)': 'Please enter Hive Database(required)',
'Please enter Hive Table(required)': 'Please enter Hive Table(required)',
'Please enter hive target dir': 'Please enter hive target dir',
'Please enter Hive Partition Keys': 'Please enter Hive Partition Keys',
'Please enter Hive Partition Values': 'Please enter Hive Partition Values',
'Please enter Replace Delimiter': 'Please enter Replace Delimiter',
'Please enter Fields Terminated': 'Please enter Fields Terminated',
'Please enter Lines Terminated': 'Please enter Lines Terminated',
'Please enter Concurrency': 'Please enter Concurrency',
'Please enter Update Key': 'Please enter Update Key',
'Please enter Job Name(required)': 'Please enter Job Name(required)',
'Please enter Custom Shell(required)': 'Please enter Custom Shell(required)',
Direct: 'Direct',
Type: 'Type',
ModelType: 'ModelType',
ColumnType: 'ColumnType',
Database: 'Database',
Column: 'Column',
'Map Column Hive': 'Map Column Hive',
'Map Column Java': 'Map Column Java',
'Export Dir': 'Export Dir',
'Hive partition Keys': 'Hive partition Keys',
'Hive partition Values': 'Hive partition Values',
FieldsTerminated: 'FieldsTerminated',
LinesTerminated: 'LinesTerminated',
IsUpdate: 'IsUpdate',
UpdateKey: 'UpdateKey',
UpdateMode: 'UpdateMode',
'Target Dir': 'Target Dir',
DeleteTargetDir: 'DeleteTargetDir',
FileType: 'FileType',
CompressionCodec: 'CompressionCodec',
CreateHiveTable: 'CreateHiveTable',
DropDelimiter: 'DropDelimiter',
OverWriteSrc: 'OverWriteSrc',
ReplaceDelimiter: 'ReplaceDelimiter',
Concurrency: 'Concurrency',
Form: 'Form',
OnlyUpdate: 'OnlyUpdate',
AllowInsert: 'AllowInsert',
'Data Source': 'Data Source',
'Data Target': 'Data Target',
'All Columns': 'All Columns',
'Some Columns': 'Some Columns',
'Branch flow': 'Branch flow',
'Custom Job': 'Custom Job',
'Custom Script': 'Custom Script',
'Cannot select the same node for successful branch flow and failed branch flow': 'Cannot select the same node for successful branch flow and failed branch flow',
'Successful branch flow and failed branch flow are required': 'conditions node Successful and failed branch flow are required',
'No resources exist': 'No resources exist',
'Please delete all non-existing resources': 'Please delete all non-existing resources',
'Unauthorized or deleted resources': 'Unauthorized or deleted resources',
'Please delete all non-existent resources': 'Please delete all non-existent resources',
Kinship: 'Workflow relationship',
Reset: 'Reset',
KinshipStateActive: 'Current selection',
KinshipState1: 'Online',
KinshipState0: 'Workflow is not online',
KinshipState10: 'Scheduling is not online',
'Dag label display control': 'Dag label display control',
Enable: 'Enable',
Disable: 'Disable',
'The Worker group no longer exists, please select the correct Worker group!': 'The Worker group no longer exists, please select the correct Worker group!',
'Please confirm whether the workflow has been saved before downloading': 'Please confirm whether the workflow has been saved before downloading',
'User name length is between 3 and 39': 'User name length is between 3 and 39',
'Timeout Settings': 'Timeout Settings',
'Connect Timeout': 'Connect Timeout',
'Socket Timeout': 'Socket Timeout',
'Connect timeout be a positive integer': 'Connect timeout be a positive integer',
'Socket Timeout be a positive integer': 'Socket Timeout be a positive integer',
ms: 'ms',
'Please Enter Url': 'Please Enter Url eg. 127.0.0.1:7077',
Master: 'Master',
'Please select the seatunnel resources': 'Please select the seatunnel resources',
zkDirectory: 'zkDirectory',
'Directory detail': 'Directory detail',
'Connection name': 'Connection name',
'Current connection settings': 'Current connection settings',
'Please save the DAG before formatting': 'Please save the DAG before formatting',
'Batch copy': 'Batch copy',
'Related items': 'Related items',
'Project name is required': 'Project name is required',
'Batch move': 'Batch move',
Version: 'Version',
'Pre tasks': 'Pre tasks',
'Running Memory': 'Running Memory',
'Max Memory': 'Max Memory',
'Min Memory': 'Min Memory',
'The workflow canvas is abnormal and cannot be saved, please recreate': 'The workflow canvas is abnormal and cannot be saved, please recreate',
Info: 'Info',
'Datasource userName': 'owner',
'Resource userName': 'owner',
DataQuality: 'DataQuality',
'DataQuality Result': 'DataQuality Result',
'DataQuality Rule': 'DataQuality Rule',
'DataQuality Rule Json': 'Rule Definition',
'Rule Name': 'Rule Name',
Default: 'UnFinished',
Failure: 'Failure',
'Rule Type': 'Rule Type',
'Statistics Value': 'Actual Value',
'Comparison Value': 'Expected Value',
'Comparison Type': 'Expected Type',
'Check Type': 'Check Type',
Operator: 'Operator',
Threshold: 'Threshold',
'Single Table': 'Single Table',
'Single Table Custom Sql': 'Single Table Custom Sql',
'Multi Table Accuracy': 'Multi Table Accuracy',
'Multi Table Comparison': 'Multi Table Comparison',
'(Expected - Actual) / Expected': '(Expected - Actual) / Expected x 100%',
'Expected - Actual': 'Expected - Actual',
'Actual - Expected': 'Actual - Expected',
'Actual / Expected': 'Actual / Expected x 100%',
'InputEntry Field': 'InputEntry Field',
'InputEntry Type': 'InputEntry Type',
'InputEntry Title': 'InputEntry Title',
'Error Output Path': 'Error Output Path',
Alert: 'Alert',
Block: 'Block',
'Environment manage': 'Environment manage',
'Create environment': 'Create environment',
'Edit environment': 'Edit environment',
'Environment value': 'Environment value',
'Environment Name': 'Environment Name',
'Environment Code': 'Environment Code',
'Environment Config': 'Environment Config',
'Environment Desc': 'Environment Desc',
'Environment Worker Group': 'Worker Groups',
'Please enter environment config': 'Please enter environment config',
'Please enter environment desc': 'Please enter environment desc',
'Please select worker groups': 'Please select worker groups',
condition: 'condition',
'The condition content cannot be empty': 'The condition content cannot be empty',
'Reference from': 'Reference from',
'No more...': 'No more...',
'Task Definition': 'Task Definition',
'Create task': 'Create task',
'Task Type': 'Task Type',
'Process execute type': 'Process execute type',
parallel: 'parallel',
'Serial wait': 'Serial wait',
'Serial discard': 'Serial discard',
'Serial priority': 'Serial priority',
'Recover serial wait': 'Recover serial wait',
IsEnableProxy: 'Enable Proxy',
WebHook: 'WebHook',
webHook: 'WebHook',
Keyword: 'Keyword',
Secret: 'Secret',
AtMobiles: '@Mobiles',
AtUserIds: '@UserIds',
IsAtAll: '@All',
Proxy: 'Proxy',
receivers: 'Receivers',
receiverCcs: 'ReceiverCcs',
transportProtocol: 'Transport Protocol',
serverHost: 'SMTP Host',
serverPort: 'SMTP Port',
sender: 'Sender',
enableSmtpAuth: 'SMTP Auth',
starttlsEnable: 'SMTP STARTTLS Enable',
sslEnable: 'SMTP SSL Enable',
smtpSslTrust: 'SMTP SSL Trust',
url: 'URL',
requestType: 'Request Type',
headerParams: 'Headers',
bodyParams: 'Body',
contentField: 'Content Field',
path: 'Script Path',
userParams: 'User Params',
corpId: 'CorpId',
secret: 'Secret',
userSendMsg: 'UserSendMsg',
agentId: 'AgentId',
users: 'Users',
Username: 'Username',
username: 'Username',
showType: 'Show Type',
'Please select a task type (required)': 'Please select a task type (required)',
layoutType: 'Layout Type',
gridLayout: 'Grid',
dagreLayout: 'Dagre',
rows: 'Rows',
cols: 'Cols',
processOnline: 'Online',
searchNode: 'Search Node',
dagScale: 'Scale',
workflowName: 'Workflow Name',
scheduleStartTime: 'Schedule Start Time',
scheduleEndTime: 'Schedule End Time',
crontabExpression: 'Crontab',
workflowPublishStatus: 'Workflow Publish Status',
schedulePublishStatus: 'Schedule Publish Status',
'Task group manage': 'Task group manage',
'Task group option': 'Task group option',
'Create task group': 'Create task group',
'Edit task group': 'Edit task group',
'Delete task group': 'Delete task group',
'Task group code': 'Task group code',
'Task group name': 'Task group name',
'Task group resource pool size': 'Resource pool size',
'Task group resource pool size be a number': 'The size of the task group resource pool should be more than 1',
'Task group resource used pool size': 'Used resource',
'Task group desc': 'Task group desc',
'Task group status': 'Task group status',
'Task group enable status': 'Enable',
'Task group disable status': 'Disable',
'Please enter task group desc': 'Please enter task group description',
'Please enter task group resource pool size': 'Please enter task group resource pool size',
'Please select project': 'Please select a project',
'Task group queue': 'Task group queue',
'Task group queue priority': 'Priority',
'Task group queue priority be a number': 'The priority of the task group queue should be a positive number',
'Task group queue force starting status': 'Starting status',
'Task group in queue': 'In queue',
'Task group queue status': 'Task status',
'View task group queue': 'View task group queue',
'Task group queue the status of waiting': 'Waiting to enter the queue',
'Task group queue the status of queuing': 'Queuing',
'Task group queue the status of releasing': 'Released',
'Modify task group queue priority': 'Edit the priority of the task group queue',
'Priority not empty': 'The value of priority can not be empty',
'Priority must be number': 'The value of priority should be number',
'Please select task name': 'Please select a task name',
'Audit Log': 'Audit Log',
'Resource Type': 'resource type',
'All Types': 'all types',
'All Modules': 'all modules',
'All Operations': 'all operations',
'User Audit': 'user management audit',
'Project Audit': 'project management audit',
'Create Operation': 'create',
'Update Operation': 'update',
'Delete Operation': 'delete',
'Read Operation': 'read',
'Process State': 'Process State',
'Upstream Tasks': 'Upstream Tasks',
'and {n} more': '… and {n} more',
'Move task': 'Move task',
'Delete task {taskName} from process {processName}?': 'Delete task {taskName} from process {processName}?',
'Delete task completely': 'Delete task completely',
'Please select a process': 'Please select a process',
'Delete {taskName}?': 'Delete {taskName}?',
'Please select a process (required)': 'Please select a process (required)',
src_connector_type: 'SrcConnType',
src_datasource_id: 'SrcSource',
src_table: 'SrcTable',
src_filter: 'SrcFilter',
src_field: 'SrcField',
statistics_name: 'ActualValName',
check_type: 'CheckType',
operator: 'Operator',
threshold: 'Threshold',
failure_strategy: 'FailureStrategy',
target_connector_type: 'TargetConnType',
target_datasource_id: 'TargetSourceId',
target_table: 'TargetTable',
target_filter: 'TargetFilter',
mapping_columns: 'OnClause',
statistics_execute_sql: 'ActualValExecSql',
comparison_name: 'ExpectedValName',
comparison_execute_sql: 'ExpectedValExecSql',
comparison_type: 'ExpectedValType',
writer_connector_type: 'WriterConnType',
writer_datasource_id: 'WriterSourceId',
target_field: 'TargetField',
field_length: 'FieldLength',
logic_operator: 'LogicOperator',
regexp_pattern: 'RegexpPattern',
deadline: 'Deadline',
datetime_format: 'DatetimeFormat',
enum_list: 'EnumList',
begin_time: 'BeginTime',
fix_value: 'FixValue',
null_check: 'NullCheck',
custom_sql: 'CustomSql',
multi_table_accuracy: 'MulTableAccuracy',
multi_table_value_comparison: 'MulTableCompare',
field_length_check: 'FieldLengthCheck',
uniqueness_check: 'UniquenessCheck',
regexp_check: 'RegexpCheck',
timeliness_check: 'TimelinessCheck',
enumeration_check: 'EnumerationCheck',
table_count_check: 'TableCountCheck',
All: 'All',
FixValue: 'FixValue',
DailyFluctuation: 'DailyFluctuation',
WeeklyFluctuation: 'WeeklyFluctuation',
MonthlyFluctuation: 'MonthlyFluctuation',
Last7DayFluctuation: 'Last7DayFluctuation',
Last30DayFluctuation: 'Last30DayFluctuation',
SrcTableTotalRows: 'SrcTableTotalRows',
TargetTableTotalRows: 'TargetTableTotalRows'
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,235 | [Bug] [SQL] Updating the fields of priority and force_start in the file of TaskGroupQueueMapper.xml is wrong. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
When I modified the priority value of a task, a prompt reported that the update had failed. So I checked the related code and found that the updates of the priority and force_start fields in TaskGroupQueueMapper.xml are wrong.
![image](https://user-images.githubusercontent.com/4928204/151483121-11432f8a-8441-4545-94d6-c58d54d57adb.png)
![image](https://user-images.githubusercontent.com/4928204/151482916-ca33f15a-c358-420e-89c9-f3f41e618c89.png)
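A minimal sketch of what the corrected statements might look like — the statement ids here are illustrative, and it assumes the intent is to always write the new value rather than guard it with a non-zero `<if>` test (the guard pattern visible elsewhere in this mapper, which can never write 0):

```xml
<update id="updatePriority">
    update t_ds_task_group_queue
    set priority = #{priority}
    where id = #{id}
</update>
<update id="updateForceStart">
    update t_ds_task_group_queue
    set force_start = #{forceStart}
    where id = #{id}
</update>
```

If 0 is a legitimate value for these columns, a conditional `<set>` block silently skips the assignment, which matches the symptom of the update appearing to fail.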
### What you expected to happen
I expect that I can modify the priority of a task.
### How to reproduce
Try it on the task group queue page.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
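A minimal sketch of the presumed fix — each statement should set the field its id names, so the two SET clauses simply need to be swapped back (compare with the buggy mapper below):

```xml
<update id="updateForceStart">
    update t_ds_task_group_queue
    set force_start = #{forceStart}
    where id = #{queueId}
</update>
<update id="modifyPriority">
    update t_ds_task_group_queue
    set priority = #{priority}
    where id = #{queueId}
</update>
```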
| https://github.com/apache/dolphinscheduler/issues/8235 | https://github.com/apache/dolphinscheduler/pull/8273 | 83d08e013af78288a59be33f10397918b7b17d3d | 73333486321adc76fdb0436cf54570c034b42e07 | "2022-01-28T03:40:03Z" | java | "2022-01-31T09:41:48Z" | dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskGroupQueueMapper.xml | <?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="org.apache.dolphinscheduler.dao.mapper.TaskGroupQueueMapper">
<resultMap type="org.apache.dolphinscheduler.dao.entity.TaskGroupQueue" id="TaskGroupQueueMap">
<result property="id" column="id" jdbcType="INTEGER"/>
<result property="taskId" column="task_id" jdbcType="INTEGER"/>
<result property="taskName" column="task_name" jdbcType="VARCHAR"/>
<result property="groupId" column="group_id" jdbcType="INTEGER"/>
<result property="processId" column="process_id" jdbcType="INTEGER"/>
<result property="priority" column="priority" jdbcType="INTEGER"/>
<result property="status" column="status" jdbcType="INTEGER"/>
<result property="forceStart" column="force_start" jdbcType="INTEGER"/>
<result property="inQueue" column="in_queue" jdbcType="INTEGER"/>
<result property="createTime" column="create_time" jdbcType="TIMESTAMP"/>
<result property="updateTime" column="update_time" jdbcType="TIMESTAMP"/>
</resultMap>
<sql id = "baseSql">
id, task_id, task_name, group_id, process_id, priority, status , force_start , in_queue, create_time, update_time
</sql>
<select id="queryTaskGroupQueuePaging" resultType="org.apache.dolphinscheduler.dao.entity.TaskGroupQueue">
select
<include refid="baseSql">
</include>
from t_ds_task_group_queue
<where>
<if test="groupId != 0">
and group_id = #{groupId}
</if>
</where>
</select>
<select id="queryByStatus" resultMap="TaskGroupQueueMap" resultType="map">
select
<include refid="baseSql">
</include>
from t_ds_task_group_queue
where status = #{status}
</select>
<delete id="deleteByTaskId">
delete from t_ds_task_group_queue
where task_id = #{taskId}
</delete>
<update id="updateStatusByTaskId">
update t_ds_task_group_queue
<set>
<if test="status != 0">
status = #{status},
</if>
</set>
where task_id = #{taskId}
</update>
<update id="updateInQueue">
update t_ds_task_group_queue
set in_queue = #{inQueue}
where id = #{id}
</update>
<update id="updateForceStart">
update t_ds_task_group_queue
set priority = #{priority}
where id = #{queueId}
</update>
<update id="modifyPriority">
update t_ds_task_group_queue
set force_start = #{forceStart}
where id = #{queueId}
</update>
<update id="updateInQueueLimit1">
update t_ds_task_group_queue
set in_queue = #{newValue}
where group_id = #{groupId} and in_queue = #{oldValue} and status = #{status} order by priority desc limit 1
</update>
<update id="updateInQueueCAS">
update t_ds_task_group_queue
set in_queue = #{newValue}
where id = #{id} and in_queue = #{oldValue}
</update>
<select id="queryHighPriorityTasks" resultType="org.apache.dolphinscheduler.dao.entity.TaskGroupQueue">
select
<include refid="baseSql" />
from t_ds_task_group_queue
where group_id = #{groupId} and status = #{status} and priority > #{priority}
</select>
<select id="queryTheHighestPriorityTasks" resultType="org.apache.dolphinscheduler.dao.entity.TaskGroupQueue">
select
<include refid="baseSql" />
from t_ds_task_group_queue
where priority = (select max(priority) from t_ds_task_group_queue where group_id = #{groupId}
and status = #{status} and in_queue = #{inQueue} and force_start = #{forceStart} ) and group_id = #{groupId}
and status = #{status} and in_queue = #{inQueue} and force_start = #{forceStart} limit 1
</select>
<select id="queryByTaskId" resultType="org.apache.dolphinscheduler.dao.entity.TaskGroupQueue">
select
<include refid="baseSql" />
from t_ds_task_group_queue
where task_id = #{taskId}
</select>
<select id="queryTaskGroupQueueByTaskGroupIdPaging" resultType="org.apache.dolphinscheduler.dao.entity.TaskGroupQueue">
select
queue.id, queue.task_name, queue.group_id, queue.process_id, queue.priority, queue.status
, queue.force_start, queue.create_time, queue.update_time,
process.name as processInstanceName,p.name as projectName,p.code as projectCode
from t_ds_task_group_queue queue
left join t_ds_process_instance process on queue.process_id = process.id
left join t_ds_process_definition p_f on process.process_definition_code = p_f.code
and process.process_definition_version = p_f.version
join t_ds_project as p on p_f.project_code = p.code
where queue.group_id = #{groupId}
<if test="taskName != null and taskName != ''">
and task_name =#{taskName}
</if>
<if test="processName != null and processName != ''">
and process.name =#{processName}
</if>
<if test="status != null">
and queue.status =#{status}
</if>
<if test="projects != null and projects.size() > 0">
and p.code in
<foreach collection="projects" index="index" item="i" open="(" separator="," close=")">
#{i.code}
</foreach>
</if>
</select>
</mapper>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,073 | [Bug] [API] The result of querying the task groups is disordered after editing a task group. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
After editing and saving a task group, the queried task group list comes back out of order. This is because the SQL in the task group mapper does not use an 'order by' clause on any field.
The details are as follows:
![image](https://user-images.githubusercontent.com/4928204/149663169-eef62dd0-95d1-4de7-917b-7047a5948d57.png)
![image](https://user-images.githubusercontent.com/4928204/149663174-db8b6765-a6b2-4f48-a5c7-657c0262f87d.png)
### What you expected to happen
I expect the queried list of task groups to be ordered.
### How to reproduce
Modify a task group and save it; you will then observe the problem.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
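A minimal sketch of one possible fix — appending an `order by` clause to the paging query. The ordering column (`update_time desc`) is an assumption; the merged PR may sort by a different field:

```xml
<select id="queryTaskGroupPaging" resultType="org.apache.dolphinscheduler.dao.entity.TaskGroup">
    select
    <include refid="baseSql"/>
    from t_ds_task_group
    <where>
        <if test="userId != 0">
            and user_id = #{userId}
        </if>
        <if test="status != null">
            and status = #{status}
        </if>
        <if test="name != null and name != '' ">
            and name like concat('%', #{name}, '%')
        </if>
    </where>
    order by update_time desc
</select>
```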
| https://github.com/apache/dolphinscheduler/issues/8073 | https://github.com/apache/dolphinscheduler/pull/8274 | 1f87eb67609d52fa0d5cf780e406572458b17836 | 6ae3efb5b6caaa72437c45bdf80c3f1b87c76a67 | "2022-01-16T14:02:51Z" | java | "2022-02-04T05:23:36Z" | dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskGroupMapper.xml | <?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="org.apache.dolphinscheduler.dao.mapper.TaskGroupMapper">
<resultMap type="org.apache.dolphinscheduler.dao.entity.TaskGroup" id="TaskGroupMap">
<result property="id" column="id" jdbcType="INTEGER"/>
<result property="name" column="name" jdbcType="VARCHAR"/>
<result property="description" column="description" jdbcType="VARCHAR"/>
<result property="groupSize" column="group_size" jdbcType="INTEGER"/>
<result property="useSize" column="use_size" jdbcType="INTEGER"/>
<result property="userId" column="user_id" jdbcType="INTEGER"/>
<result property="projectCode" column="project_code" jdbcType="INTEGER"/>
<result property="status" column="status" jdbcType="INTEGER"/>
<result property="createTime" column="create_time" jdbcType="TIMESTAMP"/>
<result property="updateTime" column="update_time" jdbcType="TIMESTAMP"/>
</resultMap>
<sql id = "baseSql">
id,name,description,project_code,group_size,use_size,status,create_time,update_time
</sql>
<select id="queryTaskGroupPaging" resultType="org.apache.dolphinscheduler.dao.entity.TaskGroup">
select
<include refid="baseSql">
</include>
from t_ds_task_group
<where>
<if test="userId != 0">
and user_id = #{userId}
</if>
<if test="status != null">
and status = #{status}
</if>
<if test="name != null and name != '' ">
and name like concat('%', #{name}, '%')
</if>
</where>
</select>
<select id="queryTaskGroupPagingByProjectCode" resultType="org.apache.dolphinscheduler.dao.entity.TaskGroup">
select
<include refid="baseSql">
</include>
from t_ds_task_group
where project_code in ( #{projectCode} , 0)
</select>
<!--modify data by id-->
<update id="updateTaskGroupResource">
update t_ds_task_group
set use_size = use_size+1
where id = #{id} and use_size < group_size and
(select count(1) FROM t_ds_task_group_queue where id = #{queueId} and status = #{queueStatus} ) = 1
</update>
<!--modify data by id-->
<update id="releaseTaskGroupResource">
update t_ds_task_group
set use_size = use_size-1
where id = #{id} and use_size > 0 and
(select count(1) FROM t_ds_task_group_queue where id = #{queueId} and status = #{queueStatus} ) = 1
</update>
<select id="queryByName" resultType="org.apache.dolphinscheduler.dao.entity.TaskGroup">
select
<include refid="baseSql" />
from t_ds_task_group
where
user_id = #{userId} and name = #{name}
</select>
<select id="selectAvailableCountById" resultType="java.lang.Integer">
select
count(1)
from t_ds_task_group
where
id = #{groupId} and use_size < group_size
</select>
<select id="selectCountByIdStatus" resultType="java.lang.Integer">
select
count(1)
from t_ds_task_group
where
id = #{id} and status = #{status}
</select>
</mapper>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,073 | [Bug] [API] The result of querying the task groups is disordered after editing a task group. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
After editing and saving a task group, the queried task group list comes back out of order. This is because the SQL in the task group mapper does not use an 'order by' clause on any field.
The details are as follows:
![image](https://user-images.githubusercontent.com/4928204/149663169-eef62dd0-95d1-4de7-917b-7047a5948d57.png)
![image](https://user-images.githubusercontent.com/4928204/149663174-db8b6765-a6b2-4f48-a5c7-657c0262f87d.png)
### What you expected to happen
I expect the queried list of task groups to be ordered.
### How to reproduce
Modify a task group and save it; you will then observe the problem.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
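The paging query in the task group queue mapper below lacks ordering as well; a sketch under the same assumption (`update_time desc` as the ordering column):

```xml
<select id="queryTaskGroupQueuePaging" resultType="org.apache.dolphinscheduler.dao.entity.TaskGroupQueue">
    select
    <include refid="baseSql"/>
    from t_ds_task_group_queue
    <where>
        <if test="groupId != 0">
            and group_id = #{groupId}
        </if>
    </where>
    order by update_time desc
</select>
```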
| https://github.com/apache/dolphinscheduler/issues/8073 | https://github.com/apache/dolphinscheduler/pull/8274 | 1f87eb67609d52fa0d5cf780e406572458b17836 | 6ae3efb5b6caaa72437c45bdf80c3f1b87c76a67 | "2022-01-16T14:02:51Z" | java | "2022-02-04T05:23:36Z" | dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskGroupQueueMapper.xml | <?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="org.apache.dolphinscheduler.dao.mapper.TaskGroupQueueMapper">
<resultMap type="org.apache.dolphinscheduler.dao.entity.TaskGroupQueue" id="TaskGroupQueueMap">
<result property="id" column="id" jdbcType="INTEGER"/>
<result property="taskId" column="task_id" jdbcType="INTEGER"/>
<result property="taskName" column="task_name" jdbcType="VARCHAR"/>
<result property="groupId" column="group_id" jdbcType="INTEGER"/>
<result property="processId" column="process_id" jdbcType="INTEGER"/>
<result property="priority" column="priority" jdbcType="INTEGER"/>
<result property="status" column="status" jdbcType="INTEGER"/>
<result property="forceStart" column="force_start" jdbcType="INTEGER"/>
<result property="inQueue" column="in_queue" jdbcType="INTEGER"/>
<result property="createTime" column="create_time" jdbcType="TIMESTAMP"/>
<result property="updateTime" column="update_time" jdbcType="TIMESTAMP"/>
</resultMap>
<sql id = "baseSql">
id, task_id, task_name, group_id, process_id, priority, status , force_start , in_queue, create_time, update_time
</sql>
<select id="queryTaskGroupQueuePaging" resultType="org.apache.dolphinscheduler.dao.entity.TaskGroupQueue">
select
<include refid="baseSql">
</include>
from t_ds_task_group_queue
<where>
<if test="groupId != 0">
and group_id = #{groupId}
</if>
</where>
</select>
<select id="queryByStatus" resultMap="TaskGroupQueueMap" resultType="map">
select
<include refid="baseSql">
</include>
from t_ds_task_group_queue
where status = #{status}
</select>
<delete id="deleteByTaskId">
delete from t_ds_task_group_queue
where task_id = #{taskId}
</delete>
<update id="updateStatusByTaskId">
update t_ds_task_group_queue
<set>
<if test="status != 0">
status = #{status},
</if>
</set>
where task_id = #{taskId}
</update>
<update id="updateInQueue">
update t_ds_task_group_queue
set in_queue = #{inQueue}
where id = #{id}
</update>
<update id="updateForceStart">
update t_ds_task_group_queue
set force_start = #{forceStart}
where id = #{queueId}
</update>
<update id="modifyPriority">
update t_ds_task_group_queue
set priority = #{priority}
where id = #{queueId}
</update>
<update id="updateInQueueLimit1">
update t_ds_task_group_queue
set in_queue = #{newValue}
where group_id = #{groupId} and in_queue = #{oldValue} and status = #{status} order by priority desc limit 1
</update>
<update id="updateInQueueCAS">
update t_ds_task_group_queue
set in_queue = #{newValue}
where id = #{id} and in_queue = #{oldValue}
</update>
<select id="queryHighPriorityTasks" resultType="org.apache.dolphinscheduler.dao.entity.TaskGroupQueue">
select
<include refid="baseSql" />
from t_ds_task_group_queue
where group_id = #{groupId} and status = #{status} and priority > #{priority}
</select>
<select id="queryTheHighestPriorityTasks" resultType="org.apache.dolphinscheduler.dao.entity.TaskGroupQueue">
select
<include refid="baseSql" />
from t_ds_task_group_queue
where priority = (select max(priority) from t_ds_task_group_queue where group_id = #{groupId}
and status = #{status} and in_queue = #{inQueue} and force_start = #{forceStart} ) and group_id = #{groupId}
and status = #{status} and in_queue = #{inQueue} and force_start = #{forceStart} limit 1
</select>
<select id="queryByTaskId" resultType="org.apache.dolphinscheduler.dao.entity.TaskGroupQueue">
select
<include refid="baseSql" />
from t_ds_task_group_queue
where task_id = #{taskId}
</select>
<select id="queryTaskGroupQueueByTaskGroupIdPaging" resultType="org.apache.dolphinscheduler.dao.entity.TaskGroupQueue">
select
queue.id, queue.task_name, queue.group_id, queue.process_id, queue.priority, queue.status
, queue.force_start, queue.create_time, queue.update_time,
process.name as processInstanceName,p.name as projectName,p.code as projectCode
from t_ds_task_group_queue queue
left join t_ds_process_instance process on queue.process_id = process.id
left join t_ds_process_definition p_f on process.process_definition_code = p_f.code
and process.process_definition_version = p_f.version
join t_ds_project as p on p_f.project_code = p.code
where queue.group_id = #{groupId}
<if test="taskName != null and taskName != ''">
and task_name =#{taskName}
</if>
<if test="processName != null and processName != ''">
and process.name =#{processName}
</if>
<if test="status != null">
and queue.status =#{status}
</if>
<if test="projects != null and projects.size() > 0">
and p.code in
<foreach collection="projects" index="index" item="i" open="(" separator="," close=")">
#{i.code}
</foreach>
</if>
</select>
</mapper>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,163 | [Bug] [UI] The task name is not displayed completely | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
The task names in the workflow cannot be displayed in full and cannot be searched.
Most of our workflows contain a lot of tasks, so this is cumbersome.
![image](https://user-images.githubusercontent.com/18095523/150718578-adc14a90-b860-485e-a41d-3b751491b70e.png)
### What you expected to happen
The task name can be displayed in its entirety.
### How to reproduce
see "What happened"
### Anything else
_No response_
### Version
2.0.3
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
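One possible direction, sketched against the `setNodeName` helper in the canvas component below — an illustrative sketch, not necessarily the fix from the linked PR. `NODE_NAME_MAX_LENGTH` is a hypothetical constant; the idea is to widen the visible label while keeping the full name in the node data so it stays searchable:

```js
// Hypothetical limit — the real fix may size labels differently.
const NODE_NAME_MAX_LENGTH = 60

// Component method (uses this.graph / this.truncateText from the component).
setNodeName (id, name) {
  id += ''
  const node = this.graph.getCellById(id)
  if (node) {
    // Render a longer label instead of cutting at 18 characters.
    node.attr('title/text', this.truncateText(name, NODE_NAME_MAX_LENGTH))
    // Keep the untruncated name so navigateTo() and search can match it.
    node.setData({ taskName: name })
  }
}
```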
| https://github.com/apache/dolphinscheduler/issues/8163 | https://github.com/apache/dolphinscheduler/pull/8280 | 6ae3efb5b6caaa72437c45bdf80c3f1b87c76a67 | 41efe305bb40097bf5cfd4c948d49ff787a4634c | "2022-01-24T03:43:18Z" | java | "2022-02-04T08:41:16Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/canvas.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="dag-canvas">
<dag-taskbar @on-drag-start="onDragStart" />
<div
class="dag-container"
ref="container"
@dragenter.prevent
@dragover.prevent
@dragleave.prevent
@drop.stop.prevent="onDrop"
>
<div ref="paper" class="paper"></div>
<div ref="minimap" class="minimap"></div>
<div class="scale-slider">
<span class="scale-title">{{$t('dagScale')}}</span>
<el-slider
v-model="scale"
vertical
:max="2"
:min="0.2"
:step="0.2"
:marks="SCALE_MARKS"
@input='scaleChange'
/>
</div>
<context-menu ref="contextMenu" />
</div>
<layout-config-modal ref="layoutModal" @submit="format" />
</div>
</template>
<script>
import _ from 'lodash'
import { Graph, DataUri } from '@antv/x6'
import dagTaskbar from './taskbar.vue'
import contextMenu from './contextMenu.vue'
import layoutConfigModal, { LAYOUT_TYPE, DEFAULT_LAYOUT_CONFIG } from './layoutConfigModal.vue'
import {
NODE,
EDGE,
X6_NODE_NAME,
X6_EDGE_NAME,
NODE_STATUS_MARKUP
} from './x6-helper'
import { DagreLayout, GridLayout } from '@antv/layout'
import { tasksType, tasksState } from '../config'
import { mapActions, mapMutations, mapState } from 'vuex'
import nodeStatus from './nodeStatus'
import x6StyleMixin from './x6-style-mixin'
const SCALE_MARKS = {
0.2: '0.2',
1: '1',
2: '2'
}
export default {
name: 'dag-canvas',
data () {
return {
graph: null,
// Used to calculate the context menu location
originalScrollPosition: {
left: 0,
top: 0
},
editable: true,
dragging: {
// Distance from the mouse to the top-left corner of the dragging element
x: 0,
y: 0,
type: ''
},
// The canvas scale
scale: 1,
SCALE_MARKS
}
},
provide () {
return {
dagCanvas: this
}
},
mixins: [x6StyleMixin],
inject: ['dagChart'],
components: {
dagTaskbar,
contextMenu,
layoutConfigModal
},
computed: {
...mapState('dag', ['tasks'])
},
methods: {
...mapActions('dag', ['genTaskCodeList']),
...mapMutations('dag', ['removeTask']),
/**
* Recalculate the paper width and height
*/
paperResize () {
const w = this.$el.offsetWidth
const h = this.$el.offsetHeight
this.graph.resize(w, h)
},
/**
* Init graph
* This will be called in the dag-chart mounted event
* @param {boolean} editable
*/
graphInit (editable) {
const self = this
this.editable = !!editable
const paper = this.$refs.paper
const minimap = this.$refs.minimap
const graph = (this.graph = new Graph({
container: paper,
selecting: {
enabled: true,
multiple: true,
rubberband: true,
rubberEdge: true,
movable: true,
showNodeSelectionBox: false
},
scaling: {
min: 0.2,
max: 2
},
mousewheel: {
enabled: true,
modifiers: ['ctrl', 'meta']
},
scroller: true,
grid: {
size: 10,
visible: true
},
snapline: true,
minimap: {
enabled: true,
container: minimap,
scalable: false,
width: 200,
height: 120
},
interacting: {
edgeLabelMovable: false,
nodeMovable: !!editable,
magnetConnectable: !!editable
},
connecting: {
// Whether multiple edges can be created between the same start node and end node
allowMulti: false,
// Whether a point is allowed to connect to a blank position on the canvas
allowBlank: false,
// Whether the start node and the end node can be the same node
allowLoop: false,
// Whether an edge is allowed to link to another edge
allowEdge: false,
// Whether edges are allowed to link to nodes
allowNode: true,
// Whether to allow edge links to ports
allowPort: false,
// Whether all available ports or nodes are highlighted when you drag the edge
highlight: true,
createEdge () {
return graph.createEdge({ shape: X6_EDGE_NAME })
},
validateConnection (data) {
const { sourceCell, targetCell } = data
if (
sourceCell &&
targetCell &&
sourceCell.isNode() &&
targetCell.isNode()
) {
const edgeData = {
sourceId: Number(sourceCell.id),
targetId: Number(targetCell.id)
}
if (!self.edgeIsValid(edgeData)) {
return false
}
}
return true
}
},
highlighting: {
nodeAvailable: {
name: 'className',
args: {
className: 'available'
}
},
magnetAvailable: {
name: 'className',
args: {
className: 'available'
}
},
magnetAdsorbed: {
name: 'className',
args: {
className: 'adsorbed'
}
}
}
}))
this.registerX6Shape()
this.bindGraphEvent()
this.originalScrollPosition = graph.getScrollbarPosition()
},
/**
* Register custom shapes
*/
registerX6Shape () {
Graph.unregisterNode(X6_NODE_NAME)
Graph.unregisterEdge(X6_EDGE_NAME)
Graph.registerNode(X6_NODE_NAME, { ...NODE })
Graph.registerEdge(X6_EDGE_NAME, { ...EDGE })
},
/**
* Bind graph events
*/
bindGraphEvent () {
this.bindStyleEvent(this.graph)
// update scale bar
this.graph.on('scale', ({ sx }) => {
this.scale = sx
})
// right click
this.graph.on('node:contextmenu', ({ x, y, cell }) => {
const { left, top } = this.graph.getScrollbarPosition()
const o = this.originalScrollPosition
this.$refs.contextMenu.show(x + (o.left - left), y + (o.top - top))
this.$refs.contextMenu.setCurrentTask({
name: cell.data.taskName,
type: cell.data.taskType,
code: Number(cell.id)
})
})
// node double click
this.graph.on('node:dblclick', ({ cell }) => {
this.dagChart.openFormModel(Number(cell.id), cell.data.taskType)
})
// create edge label
this.graph.on('edge:dblclick', ({ cell }) => {
const labelName = this.getEdgeLabelName(cell)
this.dagChart.$refs.edgeEditModel.show({
id: cell.id,
label: labelName
})
})
// Make sure the edge starts with node, not port
this.graph.on('edge:connected', ({ isNew, edge }) => {
if (isNew) {
const sourceNode = edge.getSourceNode()
edge.setSource(sourceNode)
}
})
},
/**
* @param {Edge|string} edge
*/
getEdgeLabelName (edge) {
if (typeof edge === 'string') edge = this.graph.getCellById(edge)
const labels = edge.getLabels()
const labelName = _.get(labels, ['0', 'attrs', 'label', 'text'], '')
return labelName
},
/**
* Set edge label by id
* @param {string} id
* @param {string} label
*/
setEdgeLabel (id, label) {
const edge = this.graph.getCellById(id)
edge.setLabels(label)
},
/**
* @param {string} text
* @param {number} n The maximum display width
* Each Chinese character counts as two characters
*/
truncateText (text, n) {
const exp = /[\u4E00-\u9FA5]/
let res = ''
let len = text.length
let chinese = text.match(new RegExp(exp, 'g'))
if (chinese) {
len += chinese.length
}
if (len > n) {
let i = 0
let acc = 0
while (true) {
let char = text[i]
if (exp.test(char)) {
acc += 2
} else {
acc++
}
if (acc > n) break
res += char
i++
}
res += '...'
} else {
res = text
}
return res
},
/**
* Set node name by id
* @param {string|number} id
* @param {string} name
*/
setNodeName (id, name) {
id += ''
const node = this.graph.getCellById(id)
if (node) {
const truncation = this.truncateText(name, 18)
node.attr('title/text', truncation)
node.setData({ taskName: name })
}
},
/**
* Convert the graph to JSON
* @return {{cells:Cell[]}}
*/
toJSON () {
return this.graph.toJSON()
},
/**
* Generate graph with JSON
*/
fromJSON (json) {
this.graph.fromJSON(json)
},
/**
* getNodes
* @return {Node[]}
*/
// interface Node {
// id: number;
// position: {x:number;y:number};
// data: {taskType:string;taskName:string;}
// }
getNodes () {
const nodes = this.graph.getNodes()
return nodes.map((node) => {
const position = node.getPosition()
const data = node.getData()
return {
id: Number(node.id),
position: position,
data: data
}
})
},
/**
* getEdges
* @return {Edge[]} Edge is inherited from the Cell
*/
// interface Edge {
// id: string;
// label: string;
// sourceId: number;
// targetId: number;
// }
getEdges () {
const edges = this.graph.getEdges()
return edges.map((edge) => {
const labelData = edge.getLabelAt(0)
return {
id: edge.id,
label: _.get(labelData, ['attrs', 'label', 'text'], ''),
sourceId: Number(edge.getSourceCellId()),
targetId: Number(edge.getTargetCellId())
}
})
},
/**
* downloadPNG
* @param {string} fileName
*/
downloadPNG (fileName = 'chart') {
this.graph.toPNG(
(dataUri) => {
DataUri.downloadDataUri(dataUri, `${fileName}.png`)
},
{
padding: {
top: 50,
right: 50,
bottom: 50,
left: 50
},
backgroundColor: '#f2f3f7'
}
)
},
showLayoutModal () {
const layoutModal = this.$refs.layoutModal
if (layoutModal) {
layoutModal.show()
}
},
/**
* format
* @desc Auto layout use @antv/layout
*/
format (layoutConfig) {
if (!layoutConfig) {
layoutConfig = DEFAULT_LAYOUT_CONFIG
}
this.graph.cleanSelection()
let layoutFunc = null
if (layoutConfig.type === LAYOUT_TYPE.DAGRE) {
layoutFunc = new DagreLayout({
type: LAYOUT_TYPE.DAGRE,
rankdir: 'LR',
align: 'UL',
// Calculate the node spacing based on the edge label length
ranksepFunc: (d) => {
const edges = this.graph.getOutgoingEdges(d.id)
let max = 0
if (edges && edges.length > 0) {
edges.forEach((edge) => {
const edgeView = this.graph.findViewByCell(edge)
const labelWidth = +edgeView.findAttr(
'width',
_.get(edgeView, ['labelSelectors', '0', 'body'], null)
)
max = Math.max(max, labelWidth)
})
}
return layoutConfig.ranksep + max
},
nodesep: layoutConfig.nodesep,
controlPoints: true
})
} else if (layoutConfig.type === LAYOUT_TYPE.GRID) {
layoutFunc = new GridLayout({
type: LAYOUT_TYPE.GRID,
preventOverlap: true,
preventOverlapPadding: layoutConfig.padding,
sortBy: '_index',
rows: layoutConfig.rows || undefined,
cols: layoutConfig.cols || undefined,
nodeSize: 220
})
}
const json = this.toJSON()
const nodes = json.cells
.filter((cell) => cell.shape === X6_NODE_NAME)
.map((item) => {
return {
...item,
// sort by code, using the negated id as the sort index
_index: -item.id
}
})
const edges = json.cells.filter((cell) => cell.shape === X6_EDGE_NAME)
const newModel = layoutFunc.layout({
nodes: nodes,
edges: edges
})
this.fromJSON(newModel)
},
/**
* add a node to the graph
* @param {string|number} id
* @param {string} taskType
* @param {{x:number;y:number}} coordinate Default is { x: 100, y: 100 }
*/
addNode (id, taskType, coordinate = { x: 100, y: 100 }) {
id += ''
if (!tasksType[taskType]) {
console.warn(`taskType:${taskType} is invalid!`)
return
}
const node = this.genNodeJSON(id, taskType, '', coordinate)
this.graph.addNode(node)
},
/**
* generate node json
* @param {number|string} id
* @param {string} taskType
* @param {{x:number;y:number}} coordinate Default is { x: 100, y: 100 }
*/
genNodeJSON (id, taskType, taskName, coordinate = { x: 100, y: 100 }) {
id += ''
const url = require(`../images/task-icos/${taskType.toLocaleLowerCase()}.png`)
const truncation = taskName ? this.truncateText(taskName, 18) : id
return {
id: id,
shape: X6_NODE_NAME,
x: coordinate.x,
y: coordinate.y,
data: {
taskType: taskType,
taskName: taskName
},
attrs: {
image: {
// Use xlink:href instead of href, otherwise the icon may be lost when calling downloadPNG
'xlink:href': url
},
title: {
text: truncation
}
}
}
},
/**
* generate edge json
* @param {number|string} sourceId
* @param {number|string} targetId
* @param {string} label
*/
genEdgeJSON (sourceId, targetId, label = '') {
sourceId += ''
targetId += ''
return {
shape: X6_EDGE_NAME,
source: {
cell: sourceId
},
target: {
cell: targetId
},
labels: label ? [label] : undefined
}
},
/**
* remove a node
* @param {string|number} id NodeId
*/
removeNode (id) {
id += ''
this.graph.removeNode(id)
this.removeTask(+id)
},
/**
* remove an edge
* @param {string} id EdgeId
*/
removeEdge (id) {
this.graph.removeEdge(id)
},
/**
* remove multiple cells
* @param {Cell[]} cells
*/
removeCells (cells) {
this.graph.removeCells(cells)
cells.forEach((cell) => {
if (cell.isNode()) {
this.removeTask(+cell.id)
}
})
},
/**
* Verify whether edge is valid
* The number of edges starting from a CONDITIONS task cannot be greater than 2
*/
edgeIsValid (edge) {
const { sourceId } = edge
const sourceTask = this.tasks.find((task) => task.code === sourceId)
if (sourceTask.taskType === 'CONDITIONS') {
const edges = this.getEdges()
return edges.filter((e) => e.sourceId === sourceTask.code).length <= 2
}
return true
},
/**
* Gets the current selections
* @return {Cell[]}
*/
getSelections () {
return this.graph.getSelectedCells()
},
/**
* Lock scroller
*/
lockScroller () {
this.graph.lockScroller()
},
/**
* Unlock scroller
*/
unlockScroller () {
this.graph.unlockScroller()
},
/**
* set node status icon
* @param {number} code
* @param {string} state
*/
setNodeStatus ({ code, state, taskInstance }) {
code += ''
const stateProps = tasksState[state]
const node = this.graph.getCellById(code)
if (node) {
// Destroy the previous dom
node.removeMarkup()
node.setMarkup(NODE.markup.concat(NODE_STATUS_MARKUP))
const nodeView = this.graph.findViewByCell(node)
const el = nodeView.find('div')[0]
nodeStatus({
stateProps,
taskInstance
}).$mount(el)
}
},
/**
* Drag && Drop Event
*/
onDragStart (e, taskType) {
if (!this.editable) {
e.preventDefault()
return
}
this.dragging = {
x: e.offsetX,
y: e.offsetY,
type: taskType.name
}
},
onDrop (e) {
const { type } = this.dragging
const { x, y } = this.calcGraphCoordinate(e.clientX, e.clientY)
this.genTaskCodeList({
genNum: 1
})
.then((res) => {
const [code] = res
this.addNode(code, type, { x, y })
this.dagChart.openFormModel(code, type)
})
.catch((err) => {
console.error(err)
})
},
calcGraphCoordinate (mClientX, mClientY) {
// Distance from the mouse to the top-left corner of the container;
const { left: cX, top: cY } =
this.$refs.container.getBoundingClientRect()
const mouseX = mClientX - cX
const mouseY = mClientY - cY
// The distance that paper has been scrolled
const { left: sLeft, top: sTop } = this.graph.getScrollbarPosition()
const { left: oLeft, top: oTop } = this.originalScrollPosition
const scrollX = sLeft - oLeft
const scrollY = sTop - oTop
// Distance from the mouse to the top-left corner of the dragging element;
const { x: eX, y: eY } = this.dragging
return {
x: mouseX + scrollX - eX,
y: mouseY + scrollY - eY
}
},
/**
* Get prev nodes by code
* @param {number} code
* node1 -> node2 -> node3
* getPrevNodes(node2.code) => [node1]
*/
getPrevNodes (code) {
const nodes = this.getNodes()
const edges = this.getEdges()
const nodesMap = {}
nodes.forEach((node) => {
nodesMap[node.id] = node
})
return edges
.filter((edge) => edge.targetId === code)
.map((edge) => nodesMap[edge.sourceId])
},
/**
* set prev nodes
* @param {number} code
* @param {number[]} preNodeCodes
* @param {boolean} override If set to true, setPreNodes will delete all edges that end at the node and rebuild them
*/
setPreNodes (code, preNodeCodes, override) {
const edges = this.getEdges()
const currPreCodes = []
edges.forEach((edge) => {
if (edge.targetId === code) {
if (override) {
this.removeEdge(edge.id)
} else {
currPreCodes.push(edge.sourceId)
}
}
})
preNodeCodes.forEach((preCode) => {
if (currPreCodes.includes(preCode) || preCode === code) return
const edge = this.genEdgeJSON(preCode, code)
this.graph.addEdge(edge)
})
},
/**
* Get post nodes by code
* @param {number} code
* node1 -> node2 -> node3
* getPostNodes(node2.code) => [node3]
*/
getPostNodes (code) {
const nodes = this.getNodes()
const edges = this.getEdges()
const nodesMap = {}
nodes.forEach((node) => {
nodesMap[node.id] = node
})
return edges
.filter((edge) => edge.sourceId === code)
.map((edge) => nodesMap[edge.targetId])
},
/**
* set post nodes
* @param {number} code
* @param {number[]} postNodeCodes
* @param {boolean} override If set to true, setPostNodes will delete all edges that start from the node and rebuild them
*/
setPostNodes (code, postNodeCodes, override) {
const edges = this.getEdges()
const currPostCodes = []
edges.forEach((edge) => {
if (edge.sourceId === code) {
if (override) {
this.removeEdge(edge.id)
} else {
currPostCodes.push(edge.targetId)
}
}
})
postNodeCodes.forEach((postCode) => {
if (currPostCodes.includes(postCode) || postCode === code) return
const edge = this.genEdgeJSON(code, postCode)
this.graph.addEdge(edge)
})
},
/**
* Navigate to cell
* @param {string} taskName
*/
navigateTo (taskName) {
const nodes = this.getNodes()
nodes.forEach((node) => {
if (node.data.taskName === taskName) {
const id = node.id
const cell = this.graph.getCellById(id)
this.graph.scrollToCell(cell, { animation: { duration: 600 } })
this.graph.cleanSelection()
this.graph.select(cell)
}
})
},
/**
* Canvas scale
*/
scaleChange (val) {
this.graph.zoomTo(val)
}
}
}
</script>
<style lang="scss" scoped>
@import "./canvas";
</style>
<style lang="scss">
@import "./x6-style";
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,284 | [Improvement][Alert] Dingtalk alert plugin supports markdown message type | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Dingtalk alert plugin supports markdown message type
### Use case
Support the markdown message type in the DingTalk alert plugin so that alert content can be formatted and the details are easier to read.
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
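A sketch of how the factory below might expose the new option — the constant names (`NAME_DING_TALK_MSG_TYPE`, `DING_TALK_MSG_TYPE`) and the option values are assumptions, mirroring the existing `RadioParam` pattern in this file:

```java
RadioParam msgTypeParam = RadioParam
        .newBuilder(DingTalkParamsConstants.NAME_DING_TALK_MSG_TYPE,
                DingTalkParamsConstants.DING_TALK_MSG_TYPE)
        // Keep "text" as the default so existing alert instances behave unchanged.
        .addParamsOptions(new ParamsOptions("text", "text", false))
        .addParamsOptions(new ParamsOptions("markdown", "markdown", false))
        .setValue("text")
        .addValidate(Validate.newBuilder()
                .setRequired(true)
                .build())
        .build();
```

The new param would then be appended to the `Arrays.asList(...)` returned by `params()`.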
| https://github.com/apache/dolphinscheduler/issues/8284 | https://github.com/apache/dolphinscheduler/pull/8285 | 78cac085323b1a28d866ed10357c89f2122d06a3 | 36d526d3a803f2ad9203ea54d93a255663bc6eaf | "2022-02-05T12:10:47Z" | java | "2022-02-07T14:07:37Z" | dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.alert.dingtalk;
import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_FALSE;
import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_NO;
import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_TRUE;
import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_YES;
import org.apache.dolphinscheduler.alert.api.AlertChannel;
import org.apache.dolphinscheduler.alert.api.AlertChannelFactory;
import org.apache.dolphinscheduler.spi.params.PasswordParam;
import org.apache.dolphinscheduler.spi.params.base.ParamsOptions;
import org.apache.dolphinscheduler.spi.params.base.PluginParams;
import org.apache.dolphinscheduler.spi.params.base.Validate;
import org.apache.dolphinscheduler.spi.params.input.InputParam;
import org.apache.dolphinscheduler.spi.params.radio.RadioParam;
import java.util.Arrays;
import java.util.List;
import com.google.auto.service.AutoService;
@AutoService(AlertChannelFactory.class)
public final class DingTalkAlertChannelFactory implements AlertChannelFactory {
@Override
public String name() {
return "DingTalk";
}
@Override
public List<PluginParams> params() {
InputParam webHookParam = InputParam
.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_WEB_HOOK, DingTalkParamsConstants.DING_TALK_WEB_HOOK)
.addValidate(Validate.newBuilder()
.setRequired(true)
.build())
.build();
InputParam keywordParam = InputParam
.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_KEYWORD, DingTalkParamsConstants.DING_TALK_KEYWORD)
.addValidate(Validate.newBuilder()
.setRequired(false)
.build())
.build();
InputParam secretParam = InputParam
.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_SECRET, DingTalkParamsConstants.DING_TALK_SECRET)
.addValidate(Validate.newBuilder()
.setRequired(false)
.build())
.build();
InputParam atMobilesParam = InputParam
.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_AT_MOBILES, DingTalkParamsConstants.DING_TALK_AT_MOBILES)
.addValidate(Validate.newBuilder()
.setRequired(false)
.build())
.build();
InputParam atUserIdsParam = InputParam
.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_AT_USERIDS, DingTalkParamsConstants.DING_TALK_AT_USERIDS)
.addValidate(Validate.newBuilder()
.setRequired(false)
.build())
.build();
RadioParam isAtAll = RadioParam
.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_AT_ALL, DingTalkParamsConstants.DING_TALK_AT_ALL)
.addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false))
.addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false))
.setValue(STRING_FALSE)
.addValidate(Validate.newBuilder()
.setRequired(false)
.build())
.build();
RadioParam isEnableProxy = RadioParam
.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_PROXY_ENABLE, DingTalkParamsConstants.DING_TALK_PROXY_ENABLE)
.addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false))
.addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false))
.setValue(STRING_FALSE)
.addValidate(Validate.newBuilder()
.setRequired(false)
.build())
.build();
InputParam proxyParam = InputParam
.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_PROXY, DingTalkParamsConstants.DING_TALK_PROXY)
.addValidate(Validate.newBuilder()
.setRequired(false)
.build())
.build();
InputParam portParam = InputParam
.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_PORT, DingTalkParamsConstants.DING_TALK_PORT)
.addValidate(Validate.newBuilder()
.setRequired(false)
.build())
.build();
InputParam userParam = InputParam
.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_USER, DingTalkParamsConstants.DING_TALK_USER)
.addValidate(Validate.newBuilder()
.setRequired(false)
.build())
.build();
PasswordParam passwordParam = PasswordParam
.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_PASSWORD, DingTalkParamsConstants.DING_TALK_PASSWORD)
.setPlaceholder("if enable use authentication, you need input password")
.build();
return Arrays.asList(webHookParam, keywordParam, secretParam, atMobilesParam, atUserIdsParam, isAtAll, isEnableProxy, proxyParam, portParam, userParam, passwordParam);
}
@Override
public AlertChannel create() {
return new DingTalkAlertChannel();
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,284 | [Improvement][Alert] Dingtalk alert plugin supports markdown message type | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Dingtalk alert plugin supports markdown message type
### Use case
Support the markdown message type in the DingTalk alert plugin so that alert content can be formatted and the details are easier to read.
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
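Hypothetical additions to the constants class below, following its existing naming convention (the exact names in the merged PR may differ):

```java
static final String DING_TALK_MSG_TYPE = "$t('msgType')";
static final String NAME_DING_TALK_MSG_TYPE = "MsgType";
// Candidate option values for the new message-type parameter.
static final String DING_TALK_MSG_TYPE_TEXT = "text";
static final String DING_TALK_MSG_TYPE_MARKDOWN = "markdown";
```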
| https://github.com/apache/dolphinscheduler/issues/8284 | https://github.com/apache/dolphinscheduler/pull/8285 | 78cac085323b1a28d866ed10357c89f2122d06a3 | 36d526d3a803f2ad9203ea54d93a255663bc6eaf | "2022-02-05T12:10:47Z" | java | "2022-02-07T14:07:37Z" | dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkParamsConstants.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.alert.dingtalk;
public final class DingTalkParamsConstants {
static final String DING_TALK_PROXY_ENABLE = "$t('isEnableProxy')";
static final String NAME_DING_TALK_PROXY_ENABLE = "IsEnableProxy";
static final String DING_TALK_WEB_HOOK = "$t('webhook')";
static final String NAME_DING_TALK_WEB_HOOK = "WebHook";
static final String DING_TALK_KEYWORD = "$t('keyword')";
static final String NAME_DING_TALK_KEYWORD = "Keyword";
static final String DING_TALK_SECRET = "$t('secret')";
static final String NAME_DING_TALK_SECRET = "Secret";
static final String DING_TALK_AT_MOBILES = "$t('atMobiles')";
static final String NAME_DING_TALK_AT_MOBILES = "AtMobiles";
static final String DING_TALK_AT_USERIDS = "$t('atUserIds')";
static final String NAME_DING_TALK_AT_USERIDS = "AtUserIds";
static final String DING_TALK_AT_ALL = "$t('isAtAll')";
static final String NAME_DING_TALK_AT_ALL = "IsAtAll";
static final String DING_TALK_PROXY = "$t('proxy')";
static final String NAME_DING_TALK_PROXY = "Proxy";
static final String DING_TALK_PORT = "$t('port')";
static final String NAME_DING_TALK_PORT = "Port";
static final String DING_TALK_USER = "$t('user')";
static final String NAME_DING_TALK_USER = "User";
static final String DING_TALK_PASSWORD = "$t('password')";
static final String NAME_DING_TALK_PASSWORD = "Password";
private DingTalkParamsConstants() {
throw new UnsupportedOperationException("This is a utility class and cannot be instantiated");
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,284 | [Improvement][Alert] Dingtalk alert plugin supports markdown message type | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Dingtalk alert plugin supports markdown message type
### Use case
Support the markdown message type in the DingTalk alert plugin so that alert content can be formatted and the details are easier to read.
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
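An illustrative sketch of how the sender below could build a markdown payload, mirroring its existing `textToJsonString` helper — the method name and the branch in `sendMsg` are assumptions, not the merged implementation:

```java
private String markdownToJsonString(String title, String text) {
    Map<String, Object> items = new HashMap<>();
    items.put("msgtype", "markdown");
    Map<String, String> markdownContent = new HashMap<>();
    // DingTalk markdown messages carry a title plus markdown-formatted text.
    markdownContent.put("title", title);
    markdownContent.put("text", text);
    items.put("markdown", markdownContent);
    setMsgAt(items);
    return JSONUtils.toJsonString(items);
}
```

In `sendMsg`, the `textToJsonString(text.toString())` call would then branch on the configured message type, e.g. `"markdown".equals(msgType) ? markdownToJsonString(title, text.toString()) : textToJsonString(text.toString())`, where `msgType` is a hypothetical field read from the plugin params.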
| https://github.com/apache/dolphinscheduler/issues/8284 | https://github.com/apache/dolphinscheduler/pull/8285 | 78cac085323b1a28d866ed10357c89f2122d06a3 | 36d526d3a803f2ad9203ea54d93a255663bc6eaf | "2022-02-05T12:10:47Z" | java | "2022-02-07T14:07:37Z" | dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSender.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.alert.dingtalk;
import org.apache.dolphinscheduler.alert.api.AlertResult;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.binary.StringUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import java.io.IOException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <p>
* https://open.dingtalk.com/document/robots/custom-robot-access
* https://open.dingtalk.com/document/robots/customize-robot-security-settings
* </p>
*/
public final class DingTalkSender {
private static final Logger logger = LoggerFactory.getLogger(DingTalkSender.class);
private final String url;
private final String keyword;
private final String secret;
private final String atMobiles;
private final String atUserIds;
private final Boolean atAll;
private final Boolean enableProxy;
private String proxy;
private Integer port;
private String user;
private String password;
DingTalkSender(Map<String, String> config) {
url = config.get(DingTalkParamsConstants.NAME_DING_TALK_WEB_HOOK);
keyword = config.get(DingTalkParamsConstants.NAME_DING_TALK_KEYWORD);
secret = config.get(DingTalkParamsConstants.NAME_DING_TALK_SECRET);
atMobiles = config.get(DingTalkParamsConstants.NAME_DING_TALK_AT_MOBILES);
atUserIds = config.get(DingTalkParamsConstants.NAME_DING_TALK_AT_USERIDS);
atAll = Boolean.valueOf(config.get(DingTalkParamsConstants.NAME_DING_TALK_AT_ALL));
enableProxy = Boolean.valueOf(config.get(DingTalkParamsConstants.NAME_DING_TALK_PROXY_ENABLE));
if (Boolean.TRUE.equals(enableProxy)) {
port = Integer.parseInt(config.get(DingTalkParamsConstants.NAME_DING_TALK_PORT));
proxy = config.get(DingTalkParamsConstants.NAME_DING_TALK_PROXY);
user = config.get(DingTalkParamsConstants.NAME_DING_TALK_USER);
password = config.get(DingTalkParamsConstants.NAME_DING_TALK_PASSWORD);
}
}
private static HttpPost constructHttpPost(String url, String msg) {
HttpPost post = new HttpPost(url);
StringEntity entity = new StringEntity(msg, StandardCharsets.UTF_8);
post.setEntity(entity);
post.addHeader("Content-Type", "application/json; charset=utf-8");
return post;
}
private static CloseableHttpClient getProxyClient(String proxy, int port, String user, String password) {
HttpHost httpProxy = new HttpHost(proxy, port);
CredentialsProvider provider = new BasicCredentialsProvider();
provider.setCredentials(new AuthScope(httpProxy), new UsernamePasswordCredentials(user, password));
return HttpClients.custom().setDefaultCredentialsProvider(provider).build();
}
private static CloseableHttpClient getDefaultClient() {
return HttpClients.createDefault();
}
private static RequestConfig getProxyConfig(String proxy, int port) {
HttpHost httpProxy = new HttpHost(proxy, port);
return RequestConfig.custom().setProxy(httpProxy).build();
}
private String textToJsonString(String text) {
Map<String, Object> items = new HashMap<>();
items.put("msgtype", "text");
Map<String, String> textContent = new HashMap<>();
byte[] byt = StringUtils.getBytesUtf8(text);
String txt = StringUtils.newStringUtf8(byt);
textContent.put("content", txt);
items.put("text", textContent);
setMsgAt(items);
return JSONUtils.toJsonString(items);
}
private void setMsgAt(Map<String, Object> items) {
Map<String, Object> at = new HashMap<>();
String[] atMobileArray = org.apache.dolphinscheduler.spi.utils.StringUtils.isNotBlank(atMobiles) ? atMobiles.split(",") : new String[0];
String[] atUserArray = org.apache.dolphinscheduler.spi.utils.StringUtils.isNotBlank(atUserIds) ? atUserIds.split(",") : new String[0];
boolean isAtAll = Objects.isNull(atAll) ? false : atAll;
at.put("atMobiles", atMobileArray);
at.put("atUserIds", atUserArray);
at.put("isAtAll", isAtAll);
items.put("at", at);
}
private AlertResult checkSendDingTalkSendMsgResult(String result) {
AlertResult alertResult = new AlertResult();
alertResult.setStatus("false");
if (null == result) {
alertResult.setMessage("send ding talk msg error");
logger.info("send ding talk msg error,ding talk server resp is null");
return alertResult;
}
DingTalkSendMsgResponse sendMsgResponse = JSONUtils.parseObject(result, DingTalkSendMsgResponse.class);
if (null == sendMsgResponse) {
alertResult.setMessage("send ding talk msg fail");
logger.info("send ding talk msg error,resp error");
return alertResult;
}
if (sendMsgResponse.errcode == 0) {
alertResult.setStatus("true");
alertResult.setMessage("send ding talk msg success");
return alertResult;
}
alertResult.setMessage(String.format("alert send ding talk msg error : %s", sendMsgResponse.getErrmsg()));
logger.info("alert send ding talk msg error : {}", sendMsgResponse.getErrmsg());
return alertResult;
}
public AlertResult sendDingTalkMsg(String title, String content) {
AlertResult alertResult;
try {
String resp = sendMsg(title, content);
return checkSendDingTalkSendMsgResult(resp);
} catch (Exception e) {
logger.info("send ding talk alert msg exception : {}", e.getMessage());
alertResult = new AlertResult();
alertResult.setStatus("false");
alertResult.setMessage("send ding talk alert fail.");
}
return alertResult;
}
private String sendMsg(String title, String content) throws IOException {
StringBuilder text = new StringBuilder();
if (org.apache.dolphinscheduler.spi.utils.StringUtils.isNotBlank(keyword)) {
text.append(keyword);
text.append(":");
}
text.append(title);
text.append("\n");
text.append(content);
String msgToJson = textToJsonString(text.toString());
HttpPost httpPost = constructHttpPost(org.apache.dolphinscheduler.spi.utils.StringUtils.isBlank(secret) ? url : generateSignedUrl(), msgToJson);
CloseableHttpClient httpClient;
if (Boolean.TRUE.equals(enableProxy)) {
httpClient = getProxyClient(proxy, port, user, password);
RequestConfig rcf = getProxyConfig(proxy, port);
httpPost.setConfig(rcf);
} else {
httpClient = getDefaultClient();
}
try {
CloseableHttpResponse response = httpClient.execute(httpPost);
String resp;
try {
HttpEntity entity = response.getEntity();
resp = EntityUtils.toString(entity, "UTF-8");
EntityUtils.consume(entity);
} finally {
response.close();
}
logger.info("Ding Talk send title :{},content : {}, resp: {}", title, content, resp);
return resp;
} finally {
httpClient.close();
}
}
private String generateSignedUrl() {
Long timestamp = System.currentTimeMillis();
String stringToSign = timestamp + "\n" + secret;
String sign = org.apache.dolphinscheduler.spi.utils.StringUtils.EMPTY;
try {
Mac mac = Mac.getInstance("HmacSHA256");
mac.init(new SecretKeySpec(secret.getBytes("UTF-8"), "HmacSHA256"));
byte[] signData = mac.doFinal(stringToSign.getBytes("UTF-8"));
sign = URLEncoder.encode(new String(Base64.encodeBase64(signData)),"UTF-8");
} catch (Exception e) {
logger.error("generate sign error, message:{}", e);
}
return url + "×tamp=" + timestamp + "&sign=" + sign;
}
static final class DingTalkSendMsgResponse {
private Integer errcode;
private String errmsg;
public DingTalkSendMsgResponse() {
}
public Integer getErrcode() {
return this.errcode;
}
public void setErrcode(Integer errcode) {
this.errcode = errcode;
}
public String getErrmsg() {
return this.errmsg;
}
public void setErrmsg(String errmsg) {
this.errmsg = errmsg;
}
public boolean equals(final Object o) {
if (o == this) {
return true;
}
if (!(o instanceof DingTalkSendMsgResponse)) {
return false;
}
final DingTalkSendMsgResponse other = (DingTalkSendMsgResponse) o;
final Object this$errcode = this.getErrcode();
final Object other$errcode = other.getErrcode();
if (this$errcode == null ? other$errcode != null : !this$errcode.equals(other$errcode)) {
return false;
}
final Object this$errmsg = this.getErrmsg();
final Object other$errmsg = other.getErrmsg();
if (this$errmsg == null ? other$errmsg != null : !this$errmsg.equals(other$errmsg)) {
return false;
}
return true;
}
public int hashCode() {
final int PRIME = 59;
int result = 1;
final Object $errcode = this.getErrcode();
result = result * PRIME + ($errcode == null ? 43 : $errcode.hashCode());
final Object $errmsg = this.getErrmsg();
result = result * PRIME + ($errmsg == null ? 43 : $errmsg.hashCode());
return result;
}
public String toString() {
return "DingTalkSender.DingTalkSendMsgResponse(errcode=" + this.getErrcode() + ", errmsg=" + this.getErrmsg() + ")";
}
}
}
|
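Side note on the feature tracked in the records below: the sender above only ever builds DingTalk's plain "text" payload via textToJsonString. A minimal sketch of the markdown variant, assuming the same JSONUtils helper imported by the tests in these records; the class and method names are illustrative assumptions, not the code merged in PR #8285 (DingTalk's documented markdown body is {"msgtype":"markdown","markdown":{"title":...,"text":...}}):

import java.util.HashMap;
import java.util.Map;

import org.apache.dolphinscheduler.spi.utils.JSONUtils;

public class DingTalkMarkdownPayloadSketch {
    // Builds {"msgtype":"markdown","markdown":{"title":"...","text":"..."}}
    // per DingTalk's robot API; all names here are assumptions for illustration.
    static String markdownToJsonString(String title, String text) {
        Map<String, String> markdown = new HashMap<>();
        markdown.put("title", title);
        markdown.put("text", text);
        Map<String, Object> items = new HashMap<>();
        items.put("msgtype", "markdown");
        items.put("markdown", markdown);
        return JSONUtils.toJsonString(items);
    }
}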
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,284 | [Improvement][Alert] Dingtalk alert plugin supports markdown message type | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Dingtalk alert plugin supports markdown message type
### Use case
Dingtalk alert plugin supports the markdown message type, so alert content can be formatted for easier reading of the specific details.
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8284 | https://github.com/apache/dolphinscheduler/pull/8285 | 78cac085323b1a28d866ed10357c89f2122d06a3 | 36d526d3a803f2ad9203ea54d93a255663bc6eaf | "2022-02-05T12:10:47Z" | java | "2022-02-07T14:07:37Z" | dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/test/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.alert.dingtalk;
import org.apache.dolphinscheduler.alert.api.AlertChannel;
import org.apache.dolphinscheduler.spi.params.base.PluginParams;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
public class DingTalkAlertChannelFactoryTest {
@Test
public void testGetParams() {
DingTalkAlertChannelFactory dingTalkAlertChannelFactory = new DingTalkAlertChannelFactory();
List<PluginParams> params = dingTalkAlertChannelFactory.params();
JSONUtils.toJsonString(params);
Assert.assertEquals(11, params.size());
}
@Test
public void testCreate() {
DingTalkAlertChannelFactory dingTalkAlertChannelFactory = new DingTalkAlertChannelFactory();
AlertChannel alertChannel = dingTalkAlertChannelFactory.create();
Assert.assertNotNull(alertChannel);
}
}
|
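If the markdown option lands as one more PluginParams entry (for example a message-type radio), the size assertion in testGetParams above has to move with it; a hedged sketch that avoids guessing the exact merged count:

// Hypothetical follow-up assertion: a message-type param should only grow
// the list beyond the 11 params asserted above (exact count not taken from the PR).
List<PluginParams> params = new DingTalkAlertChannelFactory().params();
Assert.assertTrue(params.size() >= 11);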
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,284 | [Improvement][Alert] Dingtalk alert plugin supports markdown message type | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Dingtalk alert plugin supports markdown message type
### Use case
Dingtalk alert plugin supports the markdown message type, so alert content can be formatted for easier reading of the specific details.
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8284 | https://github.com/apache/dolphinscheduler/pull/8285 | 78cac085323b1a28d866ed10357c89f2122d06a3 | 36d526d3a803f2ad9203ea54d93a255663bc6eaf | "2022-02-05T12:10:47Z" | java | "2022-02-07T14:07:37Z" | dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/test/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSenderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.alert.dingtalk;
import org.apache.dolphinscheduler.alert.api.AlertResult;
import java.util.HashMap;
import java.util.Map;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class DingTalkSenderTest {
private static final Map<String, String> dingTalkConfig = new HashMap<>();
@Before
public void initDingTalkConfig() {
dingTalkConfig.put(DingTalkParamsConstants.NAME_DING_TALK_KEYWORD, "keyWord");
dingTalkConfig.put(DingTalkParamsConstants.NAME_DING_TALK_WEB_HOOK, "url");
dingTalkConfig.put(DingTalkParamsConstants.NAME_DING_TALK_PROXY_ENABLE, "false");
dingTalkConfig.put(DingTalkParamsConstants.NAME_DING_TALK_PASSWORD, "password");
dingTalkConfig.put(DingTalkParamsConstants.NAME_DING_TALK_PORT, "9988");
dingTalkConfig.put(DingTalkParamsConstants.NAME_DING_TALK_USER, "user1,user2");
}
@Test
public void testSend() {
DingTalkSender dingTalkSender = new DingTalkSender(dingTalkConfig);
dingTalkSender.sendDingTalkMsg("keyWord+Welcome", "UTF-8");
dingTalkConfig.put(DingTalkParamsConstants.NAME_DING_TALK_PROXY_ENABLE, "true");
dingTalkSender = new DingTalkSender(dingTalkConfig);
AlertResult alertResult = dingTalkSender.sendDingTalkMsg("title", "content test");
Assert.assertEquals("false", alertResult.getStatus());
}
}
|
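Exercising a markdown path from the sender test above would presumably need one extra config entry; a hypothetical sketch, where the key name and value are assumptions about the new option rather than constants from the merged change:

// Hypothetical: select the markdown message type before constructing the sender.
dingTalkConfig.put("msgType", "markdown");
DingTalkSender markdownSender = new DingTalkSender(dingTalkConfig);
AlertResult markdownResult = markdownSender.sendDingTalkMsg("# title", "**content** test");
Assert.assertEquals("false", markdownResult.getStatus());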
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,284 | [Improvement][Alert] Dingtalk alert plugin supports markdown message type | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Dingtalk alert plugin supports markdown message type
### Use case
Dingtalk alert plugin supports the markdown message type, so alert content can be formatted for easier reading of the specific details.
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8284 | https://github.com/apache/dolphinscheduler/pull/8285 | 78cac085323b1a28d866ed10357c89f2122d06a3 | 36d526d3a803f2ad9203ea54d93a255663bc6eaf | "2022-02-05T12:10:47Z" | java | "2022-02-07T14:07:37Z" | dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export default {
'User Name': 'User Name',
'Please enter user name': 'Please enter user name',
Password: 'Password',
'Please enter your password': 'Please enter your password',
'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22',
Login: 'Login',
Home: 'Home',
'Failed to create node to save': 'Failed to create node to save',
'Global parameters': 'Global parameters',
'Local parameters': 'Local parameters',
'Copy success': 'Copy success',
'The browser does not support automatic copying': 'The browser does not support automatic copying',
'Whether to save the DAG graph': 'Whether to save the DAG graph',
'Current node settings': 'Current node settings',
'View history': 'View history',
'View log': 'View log',
'Force success': 'Force success',
'Enter this child node': 'Enter this child node',
'Node name': 'Node name',
'Please enter name (required)': 'Please enter name (required)',
'Run flag': 'Run flag',
Normal: 'Normal',
'Prohibition execution': 'Prohibition execution',
'Please enter description': 'Please enter description',
'Number of failed retries': 'Number of failed retries',
Times: 'Times',
'Failed retry interval': 'Failed retry interval',
Minute: 'Minute',
'Delay execution time': 'Delay execution time',
'Delay execution': 'Delay execution',
'Forced success': 'Forced success',
Cancel: 'Cancel',
'Confirm add': 'Confirm add',
'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process',
'The task has not been executed and cannot enter the sub-Process': 'The task has not been executed and cannot enter the sub-Process',
'Name already exists': 'Name already exists',
'Download Log': 'Download Log',
'Refresh Log': 'Refresh Log',
'Enter full screen': 'Enter full screen',
'Cancel full screen': 'Cancel full screen',
Close: 'Close',
'Update log success': 'Update log success',
'No more logs': 'No more logs',
'No log': 'No log',
'Loading Log...': 'Loading Log...',
'Set the DAG diagram name': 'Set the DAG diagram name',
'Please enter description(optional)': 'Please enter description(optional)',
'Set global': 'Set global',
'Whether to go online the process definition': 'Whether to go online the process definition',
'Whether to update the process definition': 'Whether to update the process definition',
Add: 'Add',
'DAG graph name cannot be empty': 'DAG graph name cannot be empty',
'Create Datasource': 'Create Datasource',
'Project Home': 'Workflow Monitor',
'Project Manage': 'Project',
'Create Project': 'Create Project',
'Cron Manage': 'Cron Manage',
'Copy Workflow': 'Copy Workflow',
'Tenant Manage': 'Tenant Manage',
'Create Tenant': 'Create Tenant',
'User Manage': 'User Manage',
'Create User': 'Create User',
'User Information': 'User Information',
'Edit Password': 'Edit Password',
Success: 'Success',
Failed: 'Failed',
Delete: 'Delete',
'Please choose': 'Please choose',
'Please enter a positive integer': 'Please enter a positive integer',
'Program Type': 'Program Type',
'Main Class': 'Main Class',
'Main Package': 'Main Package',
'Please enter main package': 'Please enter main package',
'Please enter main class': 'Please enter main class',
'Main Arguments': 'Main Arguments',
'Please enter main arguments': 'Please enter main arguments',
'Option Parameters': 'Option Parameters',
'Please enter option parameters': 'Please enter option parameters',
Resources: 'Resources',
'Custom Parameters': 'Custom Parameters',
'Custom template': 'Custom template',
Datasource: 'Datasource',
methods: 'methods',
'Please enter the procedure method': 'Please enter the procedure script \n\ncall procedure:{call <procedure-name>[(<arg1>,<arg2>, ...)]}\n\ncall function:{?= call <procedure-name>[(<arg1>,<arg2>, ...)]} ',
'The procedure method script example': 'example:{call <procedure-name>[(?,?, ...)]} or {?= call <procedure-name>[(?,?, ...)]}',
Script: 'Script',
'Please enter script(required)': 'Please enter script(required)',
'Deploy Mode': 'Deploy Mode',
'Driver Cores': 'Driver Cores',
'Please enter Driver cores': 'Please enter Driver cores',
'Driver Memory': 'Driver Memory',
'Please enter Driver memory': 'Please enter Driver memory',
'Executor Number': 'Executor Number',
'Please enter Executor number': 'Please enter Executor number',
'The Executor number should be a positive integer': 'The Executor number should be a positive integer',
'Executor Memory': 'Executor Memory',
'Please enter Executor memory': 'Please enter Executor memory',
'Executor Cores': 'Executor Cores',
'Please enter Executor cores': 'Please enter Executor cores',
'Memory should be a positive integer': 'Memory should be a positive integer',
'Core number should be positive integer': 'Core number should be positive integer',
'Flink Version': 'Flink Version',
'JobManager Memory': 'JobManager Memory',
'Please enter JobManager memory': 'Please enter JobManager memory',
'TaskManager Memory': 'TaskManager Memory',
'Please enter TaskManager memory': 'Please enter TaskManager memory',
'Slot Number': 'Slot Number',
'Please enter Slot number': 'Please enter Slot number',
Parallelism: 'Parallelism',
'Custom Parallelism': 'Configure parallelism',
'Please enter Parallelism': 'Please enter Parallelism',
'Parallelism tip': 'If there are a large number of tasks requiring complement, you can use the custom parallelism to ' +
'set the complement task thread to a reasonable value to avoid too large impact on the server.',
'Parallelism number should be positive integer': 'Parallelism number should be positive integer',
'TaskManager Number': 'TaskManager Number',
'Please enter TaskManager number': 'Please enter TaskManager number',
'App Name': 'App Name',
'Please enter app name(optional)': 'Please enter app name(optional)',
'SQL Type': 'SQL Type',
'Send Email': 'Send Email',
'Log display': 'Log display',
'rows of result': 'rows of result',
Title: 'Title',
'Please enter the title of email': 'Please enter the title of email',
Table: 'Table',
TableMode: 'Table',
Attachment: 'Attachment',
'SQL Parameter': 'SQL Parameter',
'SQL Statement': 'SQL Statement',
'UDF Function': 'UDF Function',
'Please enter a SQL Statement(required)': 'Please enter a SQL Statement(required)',
'Please enter a JSON Statement(required)': 'Please enter a JSON Statement(required)',
'One form or attachment must be selected': 'One form or attachment must be selected',
'Mail subject required': 'Mail subject required',
'Child Node': 'Child Node',
'Please select a sub-Process': 'Please select a sub-Process',
Edit: 'Edit',
'Switch To This Version': 'Switch To This Version',
'Datasource Name': 'Datasource Name',
'Please enter datasource name': 'Please enter datasource name',
IP: 'IP',
'Please enter IP': 'Please enter IP',
Port: 'Port',
'Please enter port': 'Please enter port',
'Database Name': 'Database Name',
'Please enter database name': 'Please enter database name',
'Oracle Connect Type': 'ServiceName or SID',
'Oracle Service Name': 'ServiceName',
'Oracle SID': 'SID',
'jdbc connect parameters': 'jdbc connect parameters',
'Test Connect': 'Test Connect',
'Please enter resource name': 'Please enter resource name',
'Please enter resource folder name': 'Please enter resource folder name',
'Please enter a non-query SQL statement': 'Please enter a non-query SQL statement',
'Please enter IP/hostname': 'Please enter IP/hostname',
'jdbc connection parameters is not a correct JSON format': 'jdbc connection parameters is not a correct JSON format',
'#': '#',
'Datasource Type': 'Datasource Type',
'Datasource Parameter': 'Datasource Parameter',
'Create Time': 'Create Time',
'Update Time': 'Update Time',
Operation: 'Operation',
'Current Version': 'Current Version',
'Click to view': 'Click to view',
'Delete?': 'Delete?',
'Switch Version Successfully': 'Switch Version Successfully',
'Confirm Switch To This Version?': 'Confirm Switch To This Version?',
Confirm: 'Confirm',
'Task status statistics': 'Task Status Statistics',
Number: 'Number',
State: 'State',
'Dry-run flag': 'Dry-run flag',
'Process Status Statistics': 'Process Status Statistics',
'Process Definition Statistics': 'Process Definition Statistics',
'Project Name': 'Project Name',
'Please enter name': 'Please enter name',
'Owned Users': 'Owned Users',
'Process Pid': 'Process Pid',
'Zk registration directory': 'Zk registration directory',
cpuUsage: 'cpuUsage',
memoryUsage: 'memoryUsage',
'Last heartbeat time': 'Last heartbeat time',
'Edit Tenant': 'Edit Tenant',
'OS Tenant Code': 'OS Tenant Code',
'Tenant Name': 'Tenant Name',
Queue: 'Yarn Queue',
'Please select a queue': 'Defaults to the tenant associated queue',
'Please enter the os tenant code in English': 'Please enter the os tenant code in English',
'Please enter os tenant code in English': 'Please enter os tenant code in English',
'Please enter os tenant code': 'Please enter os tenant code',
'Please enter tenant Name': 'Please enter tenant Name',
'The os tenant code. Only letters or a combination of letters and numbers are allowed': 'The os tenant code. Only letters or a combination of letters and numbers are allowed',
'Edit User': 'Edit User',
Tenant: 'Tenant',
Email: 'Email',
Phone: 'Phone',
'User Type': 'User Type',
'Please enter phone number': 'Please enter phone number',
'Please enter email': 'Please enter email',
'Please enter the correct email format': 'Please enter the correct email format',
'Please enter the correct mobile phone format': 'Please enter the correct mobile phone format',
Project: 'Project',
Authorize: 'Authorize',
'File resources': 'File resources',
'UDF resources': 'UDF resources',
'UDF resources directory': 'UDF resources directory',
'Please select UDF resources directory': 'Please select UDF resources directory',
'Alarm group': 'Alarm group',
'Alarm group required': 'Alarm group required',
'Edit alarm group': 'Edit alarm group',
'Create alarm group': 'Create alarm group',
'Create Alarm Instance': 'Create Alarm Instance',
'Edit Alarm Instance': 'Edit Alarm Instance',
'Group Name': 'Group Name',
'Alarm instance name': 'Alarm instance name',
'Alarm plugin name': 'Alarm plugin name',
'Select plugin': 'Select plugin',
'Select Alarm plugin': 'Please select an Alarm plugin',
'Please enter group name': 'Please enter group name',
'Instance parameter exception': 'Instance parameter exception',
'Group Type': 'Group Type',
'Alarm plugin instance': 'Alarm plugin instance',
'Please enter alarm plugin instance name': 'Please enter alarm plugin instance name',
'Select Alarm plugin instance': 'Please select an Alarm plugin instance',
Remarks: 'Remarks',
SMS: 'SMS',
'Managing Users': 'Managing Users',
Permission: 'Permission',
Administrator: 'Administrator',
'Confirm Password': 'Confirm Password',
'Please enter confirm password': 'Please enter confirm password',
'Password cannot be in Chinese': 'Password cannot be in Chinese',
'Please enter a password (6-22) character password': 'Please enter a password (6-22) character password',
'Confirmation password cannot be in Chinese': 'Confirmation password cannot be in Chinese',
'Please enter a confirmation password (6-22) character password': 'Please enter a confirmation password (6-22) character password',
'The password is inconsistent with the confirmation password': 'The password is inconsistent with the confirmation password',
'Please select the datasource': 'Please select the datasource',
'Please select resources': 'Please select resources',
Query: 'Query',
'Non Query': 'Non Query',
'prop(required)': 'prop(required)',
'value(optional)': 'value(optional)',
'value(required)': 'value(required)',
'prop is empty': 'prop is empty',
'value is empty': 'value is empty',
'prop is repeat': 'prop is repeat',
'Start Time': 'Start Time',
'End Time': 'End Time',
crontab: 'crontab',
'Failure Strategy': 'Failure Strategy',
online: 'online',
offline: 'offline',
'Task Status': 'Task Status',
'Process Instance': 'Process Instance',
'Task Instance': 'Task Instance',
'Select date range': 'Select date range',
startDate: 'startDate',
endDate: 'endDate',
Date: 'Date',
Waiting: 'Waiting',
Execution: 'Execution',
Finish: 'Finish',
'Create File': 'Create File',
'Create folder': 'Create folder',
'File Name': 'File Name',
'Folder Name': 'Folder Name',
'File Format': 'File Format',
'Folder Format': 'Folder Format',
'File Content': 'File Content',
'Upload File Size': 'Upload File size cannot exceed 1G',
Create: 'Create',
'Please enter the resource content': 'Please enter the resource content',
'Resource content cannot exceed 3000 lines': 'Resource content cannot exceed 3000 lines',
'File Details': 'File Details',
'Download Details': 'Download Details',
Return: 'Return',
Save: 'Save',
'File Manage': 'File Manage',
'Upload Files': 'Upload Files',
'Create UDF Function': 'Create UDF Function',
'Upload UDF Resources': 'Upload UDF Resources',
'Service-Master': 'Service-Master',
'Service-Worker': 'Service-Worker',
'Process Name': 'Process Name',
Executor: 'Executor',
'Run Type': 'Run Type',
'Scheduling Time': 'Scheduling Time',
'Run Times': 'Run Times',
host: 'host',
'fault-tolerant sign': 'fault-tolerant sign',
Rerun: 'Rerun',
'Recovery Failed': 'Recovery Failed',
Stop: 'Stop',
Pause: 'Pause',
'Recovery Suspend': 'Recovery Suspend',
Gantt: 'Gantt',
'Node Type': 'Node Type',
'Submit Time': 'Submit Time',
Duration: 'Duration',
'Retry Count': 'Retry Count',
'Task Name': 'Task Name',
'Task Date': 'Task Date',
'Source Table': 'Source Table',
'Record Number': 'Record Number',
'Target Table': 'Target Table',
'Online viewing type is not supported': 'Online viewing type is not supported',
Size: 'Size',
Rename: 'Rename',
Download: 'Download',
Export: 'Export',
'Version Info': 'Version Info',
Submit: 'Submit',
'Edit UDF Function': 'Edit UDF Function',
type: 'type',
'UDF Function Name': 'UDF Function Name',
FILE: 'FILE',
UDF: 'UDF',
'File Subdirectory': 'File Subdirectory',
'Please enter a function name': 'Please enter a function name',
'Package Name': 'Package Name',
'Please enter a Package name': 'Please enter a Package name',
Parameter: 'Parameter',
'Please enter a parameter': 'Please enter a parameter',
'UDF Resources': 'UDF Resources',
'Upload Resources': 'Upload Resources',
Instructions: 'Instructions',
'Please enter a instructions': 'Please enter instructions',
'Please enter a UDF function name': 'Please enter a UDF function name',
'Select UDF Resources': 'Select UDF Resources',
'Class Name': 'Class Name',
'Jar Package': 'Jar Package',
'Library Name': 'Library Name',
'UDF Resource Name': 'UDF Resource Name',
'File Size': 'File Size',
Description: 'Description',
'Drag Nodes and Selected Items': 'Drag Nodes and Selected Items',
'Select Line Connection': 'Select Line Connection',
'Delete selected lines or nodes': 'Delete selected lines or nodes',
'Full Screen': 'Full Screen',
Unpublished: 'Unpublished',
'Start Process': 'Start Process',
'Execute from the current node': 'Execute from the current node',
'Recover tolerance fault process': 'Recover tolerance fault process',
'Resume the suspension process': 'Resume the suspension process',
'Execute from the failed nodes': 'Execute from the failed nodes',
'Complement Data': 'Complement Data',
'Scheduling execution': 'Scheduling execution',
'Recovery waiting thread': 'Recovery waiting thread',
'Submitted successfully': 'Submitted successfully',
Executing: 'Executing',
'Ready to pause': 'Ready to pause',
'Ready to stop': 'Ready to stop',
'Need fault tolerance': 'Need fault tolerance',
Kill: 'Kill',
'Waiting for thread': 'Waiting for thread',
'Waiting for dependence': 'Waiting for dependence',
Start: 'Start',
Copy: 'Copy',
'Copy name': 'Copy name',
'Copy path': 'Copy path',
'Please enter keyword': 'Please enter keyword',
'File Upload': 'File Upload',
'Drag the file into the current upload window': 'Drag the file into the current upload window',
'Drag area upload': 'Drag area upload',
Upload: 'Upload',
'ReUpload File': 'ReUpload File',
'Please enter file name': 'Please enter file name',
'Please select the file to upload': 'Please select the file to upload',
'Resources manage': 'Resources',
Security: 'Security',
Logout: 'Logout',
'No data': 'No data',
'Uploading...': 'Uploading...',
'Loading...': 'Loading...',
List: 'List',
'Unable to download without proper url': 'Unable to download without proper url',
Process: 'Process',
'Process definition': 'Process definition',
'Task record': 'Task record',
'Warning group manage': 'Warning group manage',
'Warning instance manage': 'Warning instance manage',
'Servers manage': 'Servers manage',
'UDF manage': 'UDF manage',
'Resource manage': 'Resource manage',
'Function manage': 'Function manage',
'Edit password': 'Edit password',
'Ordinary users': 'Ordinary users',
'Create process': 'Create process',
'Import process': 'Import process',
'Timing state': 'Timing state',
Timing: 'Timing',
Timezone: 'Timezone',
TreeView: 'TreeView',
'Mailbox already exists! Recipients and copyers cannot repeat': 'Mailbox already exists! Recipients and Cc cannot repeat',
'Mailbox input is illegal': 'Mailbox input is invalid',
'Please set the parameters before starting': 'Please set the parameters before starting',
Continue: 'Continue',
End: 'End',
'Node execution': 'Node execution',
'Backward execution': 'Backward execution',
'Forward execution': 'Forward execution',
'Execute only the current node': 'Execute only the current node',
'Notification strategy': 'Notification strategy',
'Notification group': 'Notification group',
'Please select a notification group': 'Please select a notification group',
'Whether it is a complement process?': 'Whether it is a complement process?',
'Schedule date': 'Schedule date',
'Mode of execution': 'Mode of execution',
'Serial execution': 'Serial execution',
'Parallel execution': 'Parallel execution',
'Set parameters before timing': 'Set parameters before timing',
'Start and stop time': 'Start and stop time',
'Please select time': 'Please select time',
'Please enter crontab': 'Please enter crontab',
none_1: 'none',
success_1: 'success',
failure_1: 'failure',
All_1: 'All',
Toolbar: 'Toolbar',
'View variables': 'View variables',
'Format DAG': 'Format DAG',
'Refresh DAG status': 'Refresh DAG status',
Return_1: 'Return',
'Please enter format': 'Please enter format',
'connection parameter': 'connection parameter',
'Process definition details': 'Process definition details',
'Create process definition': 'Create process definition',
'Scheduled task list': 'Scheduled task list',
'Process instance details': 'Process instance details',
'Create Resource': 'Create Resource',
'User Center': 'User Center',
AllStatus: 'All',
None: 'None',
Name: 'Name',
'Process priority': 'Process priority',
'Task priority': 'Task priority',
'Task timeout alarm': 'Task timeout alarm',
'Timeout strategy': 'Timeout strategy',
'Timeout alarm': 'Timeout alarm',
'Timeout failure': 'Timeout failure',
'Timeout period': 'Timeout period',
'Waiting Dependent complete': 'Waiting Dependent complete',
'Waiting Dependent start': 'Waiting Dependent start',
'Check interval': 'Check interval',
'Timeout must be longer than check interval': 'Timeout must be longer than check interval',
'Timeout strategy must be selected': 'Timeout strategy must be selected',
'Timeout must be a positive integer': 'Timeout must be a positive integer',
'Add dependency': 'Add dependency',
'Whether dry-run': 'Whether dry-run',
and: 'and',
or: 'or',
month: 'month',
week: 'week',
day: 'day',
hour: 'hour',
Running: 'Running',
'Waiting for dependency to complete': 'Waiting for dependency to complete',
Selected: 'Selected',
CurrentHour: 'CurrentHour',
Last1Hour: 'Last1Hour',
Last2Hours: 'Last2Hours',
Last3Hours: 'Last3Hours',
Last24Hours: 'Last24Hours',
today: 'today',
Last1Days: 'Last1Days',
Last2Days: 'Last2Days',
Last3Days: 'Last3Days',
Last7Days: 'Last7Days',
ThisWeek: 'ThisWeek',
LastWeek: 'LastWeek',
LastMonday: 'LastMonday',
LastTuesday: 'LastTuesday',
LastWednesday: 'LastWednesday',
LastThursday: 'LastThursday',
LastFriday: 'LastFriday',
LastSaturday: 'LastSaturday',
LastSunday: 'LastSunday',
ThisMonth: 'ThisMonth',
LastMonth: 'LastMonth',
LastMonthBegin: 'LastMonthBegin',
LastMonthEnd: 'LastMonthEnd',
'Refresh status succeeded': 'Refresh status succeeded',
'Queue manage': 'Yarn Queue manage',
'Create queue': 'Create queue',
'Edit queue': 'Edit queue',
'Datasource manage': 'Datasource',
'History task record': 'History task record',
'Please go online': 'Please go online',
'Queue value': 'Queue value',
'Please enter queue value': 'Please enter queue value',
'Worker group manage': 'Worker group manage',
'Create worker group': 'Create worker group',
'Edit worker group': 'Edit worker group',
'Token manage': 'Token manage',
'Create token': 'Create token',
'Edit token': 'Edit token',
Addresses: 'Addresses',
'Worker Addresses': 'Worker Addresses',
'Please select the worker addresses': 'Please select the worker addresses',
'Failure time': 'Failure time',
'Expiration time': 'Expiration time',
User: 'User',
'Please enter token': 'Please enter token',
'Generate token': 'Generate token',
Monitor: 'Monitor',
Group: 'Group',
'Queue statistics': 'Queue statistics',
'Command status statistics': 'Command status statistics',
'Task kill': 'Task Kill',
'Task queue': 'Task queue',
'Error command count': 'Error command count',
'Normal command count': 'Normal command count',
Manage: ' Manage',
'Number of connections': 'Number of connections',
Sent: 'Sent',
Received: 'Received',
'Min latency': 'Min latency',
'Avg latency': 'Avg latency',
'Max latency': 'Max latency',
'Node count': 'Node count',
'Query time': 'Query time',
'Node self-test status': 'Node self-test status',
'Health status': 'Health status',
'Max connections': 'Max connections',
'Threads connections': 'Threads connections',
'Max used connections': 'Max used connections',
'Threads running connections': 'Threads running connections',
'Worker group': 'Worker group',
'Please enter a positive integer greater than 0': 'Please enter a positive integer greater than 0',
'Pre Statement': 'Pre Statement',
'Post Statement': 'Post Statement',
'Statement cannot be empty': 'Statement cannot be empty',
'Process Define Count': 'Workflow Define Count',
'Process Instance Running Count': 'Process Instance Running Count',
'command number of waiting for running': 'command number of waiting for running',
'failure command number': 'failure command number',
'tasks number of waiting running': 'tasks number of waiting running',
'task number of ready to kill': 'task number of ready to kill',
'Statistics manage': 'Statistics Manage',
statistics: 'Statistics',
'select tenant': 'select tenant',
'Please enter Principal': 'Please enter Principal',
'Please enter the kerberos authentication parameter java.security.krb5.conf': 'Please enter the kerberos authentication parameter java.security.krb5.conf',
'Please enter the kerberos authentication parameter login.user.keytab.username': 'Please enter the kerberos authentication parameter login.user.keytab.username',
'Please enter the kerberos authentication parameter login.user.keytab.path': 'Please enter the kerberos authentication parameter login.user.keytab.path',
'The start time must not be the same as the end': 'The start time must not be the same as the end',
'Startup parameter': 'Startup parameter',
'Startup type': 'Startup type',
'warning of timeout': 'warning of timeout',
'Next five execution times': 'Next five execution times',
'Execute time': 'Execute time',
'Complement range': 'Complement range',
'Http Url': 'Http Url',
'Http Method': 'Http Method',
'Http Parameters': 'Http Parameters',
'Http Parameters Key': 'Http Parameters Key',
'Http Parameters Position': 'Http Parameters Position',
'Http Parameters Value': 'Http Parameters Value',
'Http Check Condition': 'Http Check Condition',
'Http Condition': 'Http Condition',
'Please Enter Http Url': 'Please Enter Http Url(required)',
'Please Enter Http Condition': 'Please Enter Http Condition',
'There is no data for this period of time': 'There is no data for this period of time',
'Worker addresses cannot be empty': 'Worker addresses cannot be empty',
'Please generate token': 'Please generate token',
'Please Select token': 'Please select the expiration time of token',
'Spark Version': 'Spark Version',
TargetDataBase: 'target database',
TargetTable: 'target table',
TargetJobName: 'target job name',
'Please enter Pigeon job name': 'Please enter Pigeon job name',
'Please enter the table of target': 'Please enter the table of target',
'Please enter a Target Table(required)': 'Please enter a Target Table(required)',
SpeedByte: 'speed(byte count)',
SpeedRecord: 'speed(record count)',
'0 means unlimited by byte': '0 means unlimited',
'0 means unlimited by count': '0 means unlimited',
'Modify User': 'Modify User',
'Whether directory': 'Whether directory',
Yes: 'Yes',
No: 'No',
'Hadoop Custom Params': 'Hadoop Params',
'Sqoop Advanced Parameters': 'Sqoop Params',
'Sqoop Job Name': 'Job Name',
'Please enter Mysql Database(required)': 'Please enter Mysql Database(required)',
'Please enter Mysql Table(required)': 'Please enter Mysql Table(required)',
'Please enter Columns (Comma separated)': 'Please enter Columns (Comma separated)',
'Please enter Target Dir(required)': 'Please enter Target Dir(required)',
'Please enter Export Dir(required)': 'Please enter Export Dir(required)',
'Please enter Hive Database(required)': 'Please enter Hive Database(required)',
'Please enter Hive Table(required)': 'Please enter Hive Table(required)',
'Please enter hive target dir': 'Please enter hive target dir',
'Please enter Hive Partition Keys': 'Please enter Hive Partition Key',
'Please enter Hive Partition Values': 'Please enter Partition Value',
'Please enter Replace Delimiter': 'Please enter Replace Delimiter',
'Please enter Fields Terminated': 'Please enter Fields Terminated',
'Please enter Lines Terminated': 'Please enter Lines Terminated',
'Please enter Concurrency': 'Please enter Concurrency',
'Please enter Update Key': 'Please enter Update Key',
'Please enter Job Name(required)': 'Please enter Job Name(required)',
'Please enter Custom Shell(required)': 'Please enter Custom Shell(required)',
Direct: 'Direct',
Type: 'Type',
ModelType: 'ModelType',
ColumnType: 'ColumnType',
Database: 'Database',
Column: 'Column',
'Map Column Hive': 'Map Column Hive',
'Map Column Java': 'Map Column Java',
'Export Dir': 'Export Dir',
'Hive partition Keys': 'Hive partition Keys',
'Hive partition Values': 'Hive partition Values',
FieldsTerminated: 'FieldsTerminated',
LinesTerminated: 'LinesTerminated',
IsUpdate: 'IsUpdate',
UpdateKey: 'UpdateKey',
UpdateMode: 'UpdateMode',
'Target Dir': 'Target Dir',
DeleteTargetDir: 'DeleteTargetDir',
FileType: 'FileType',
CompressionCodec: 'CompressionCodec',
CreateHiveTable: 'CreateHiveTable',
DropDelimiter: 'DropDelimiter',
OverWriteSrc: 'OverWriteSrc',
ReplaceDelimiter: 'ReplaceDelimiter',
Concurrency: 'Concurrency',
Form: 'Form',
OnlyUpdate: 'OnlyUpdate',
AllowInsert: 'AllowInsert',
'Data Source': 'Data Source',
'Data Target': 'Data Target',
'All Columns': 'All Columns',
'Some Columns': 'Some Columns',
'Branch flow': 'Branch flow',
'Custom Job': 'Custom Job',
'Custom Script': 'Custom Script',
'Cannot select the same node for successful branch flow and failed branch flow': 'Cannot select the same node for successful branch flow and failed branch flow',
'Successful branch flow and failed branch flow are required': 'Successful and failed branch flows are required for the conditions node',
'No resources exist': 'No resources exist',
'Please delete all non-existing resources': 'Please delete all non-existing resources',
'Unauthorized or deleted resources': 'Unauthorized or deleted resources',
'Please delete all non-existent resources': 'Please delete all non-existent resources',
Kinship: 'Workflow relationship',
Reset: 'Reset',
KinshipStateActive: 'Current selection',
KinshipState1: 'Online',
KinshipState0: 'Workflow is not online',
KinshipState10: 'Scheduling is not online',
'Dag label display control': 'Dag label display control',
Enable: 'Enable',
Disable: 'Disable',
'The Worker group no longer exists, please select the correct Worker group!': 'The Worker group no longer exists, please select the correct Worker group!',
'Please confirm whether the workflow has been saved before downloading': 'Please confirm whether the workflow has been saved before downloading',
'User name length is between 3 and 39': 'User name length is between 3 and 39',
'Timeout Settings': 'Timeout Settings',
'Connect Timeout': 'Connect Timeout',
'Socket Timeout': 'Socket Timeout',
'Connect timeout be a positive integer': 'Connect timeout must be a positive integer',
'Socket Timeout be a positive integer': 'Socket timeout must be a positive integer',
ms: 'ms',
'Please Enter Url': 'Please Enter Url eg. 127.0.0.1:7077',
Master: 'Master',
'Please select the seatunnel resources': 'Please select the seatunnel resources',
zkDirectory: 'zkDirectory',
'Directory detail': 'Directory detail',
'Connection name': 'Connection name',
'Current connection settings': 'Current connection settings',
'Please save the DAG before formatting': 'Please save the DAG before formatting',
'Batch copy': 'Batch copy',
'Related items': 'Related items',
'Project name is required': 'Project name is required',
'Batch move': 'Batch move',
Version: 'Version',
'Pre tasks': 'Pre tasks',
'Running Memory': 'Running Memory',
'Max Memory': 'Max Memory',
'Min Memory': 'Min Memory',
'The workflow canvas is abnormal and cannot be saved, please recreate': 'The workflow canvas is abnormal and cannot be saved, please recreate',
Info: 'Info',
'Datasource userName': 'owner',
'Resource userName': 'owner',
DataQuality: 'DataQuality',
'DataQuality Result': 'DataQuality Result',
'DataQuality Rule': 'DataQuality Rule',
'DataQuality Rule Json': 'Rule Definition',
'Rule Name': 'Rule Name',
Default: 'UnFinished',
Failure: 'Failure',
'Rule Type': 'Rule Type',
'Statistics Value': 'Actual Value',
'Comparison Value': 'Expected Value',
'Comparison Type': 'Expected Type',
'Check Type': 'Check Type',
Operator: 'Operator',
Threshold: 'Threshold',
'Single Table': 'Single Table',
'Single Table Custom Sql': 'Single Table Custom Sql',
'Multi Table Accuracy': 'Multi Table Accuracy',
'Multi Table Comparison': 'Multi Table Comparison',
'(Expected - Actual) / Expected': '(Expected - Actual) / Expected x 100%',
'Expected - Actual': 'Expected - Actual',
'Actual - Expected': 'Actual - Expected',
'Actual / Expected': 'Actual / Expected x 100%',
'InputEntry Field': 'InputEntry Field',
'InputEntry Type': 'InputEntry Type',
'InputEntry Title': 'InputEntry Title',
'Error Output Path': 'Error Output Path',
Alert: 'Alert',
Block: 'Block',
'Environment manage': 'Environment manage',
'Create environment': 'Create environment',
'Edit environment': 'Edit environment',
'Environment value': 'Environment value',
'Environment Name': 'Environment Name',
'Environment Code': 'Environment Code',
'Environment Config': 'Environment Config',
'Environment Desc': 'Environment Desc',
'Environment Worker Group': 'Worker Groups',
'Please enter environment config': 'Please enter environment config',
'Please enter environment desc': 'Please enter environment desc',
'Please select worker groups': 'Please select worker groups',
condition: 'condition',
'The condition content cannot be empty': 'The condition content cannot be empty',
'Reference from': 'Reference from',
'No more...': 'No more...',
'Task Definition': 'Task Definition',
'Create task': 'Create task',
'Task Type': 'Task Type',
'Process execute type': 'Process execute type',
parallel: 'parallel',
'Serial wait': 'Serial wait',
'Serial discard': 'Serial discard',
'Serial priority': 'Serial priority',
'Recover serial wait': 'Recover serial wait',
IsEnableProxy: 'Enable Proxy',
WebHook: 'WebHook',
webHook: 'WebHook',
Keyword: 'Keyword',
Secret: 'Secret',
AtMobiles: '@Mobiles',
AtUserIds: '@UserIds',
IsAtAll: '@All',
Proxy: 'Proxy',
receivers: 'Receivers',
receiverCcs: 'ReceiverCcs',
transportProtocol: 'Transport Protocol',
serverHost: 'SMTP Host',
serverPort: 'SMTP Port',
sender: 'Sender',
enableSmtpAuth: 'SMTP Auth',
starttlsEnable: 'SMTP STARTTLS Enable',
sslEnable: 'SMTP SSL Enable',
smtpSslTrust: 'SMTP SSL Trust',
url: 'URL',
requestType: 'Request Type',
headerParams: 'Headers',
bodyParams: 'Body',
contentField: 'Content Field',
path: 'Script Path',
userParams: 'User Params',
corpId: 'CorpId',
secret: 'Secret',
userSendMsg: 'UserSendMsg',
agentId: 'AgentId',
users: 'Users',
Username: 'Username',
username: 'Username',
showType: 'Show Type',
'Please select a task type (required)': 'Please select a task type (required)',
layoutType: 'Layout Type',
gridLayout: 'Grid',
dagreLayout: 'Dagre',
rows: 'Rows',
cols: 'Cols',
processOnline: 'Online',
searchNode: 'Search Node',
dagScale: 'Scale',
workflowName: 'Workflow Name',
scheduleStartTime: 'Schedule Start Time',
scheduleEndTime: 'Schedule End Time',
crontabExpression: 'Crontab',
workflowPublishStatus: 'Workflow Publish Status',
schedulePublishStatus: 'Schedule Publish Status',
'Task group manage': 'Task group manage',
'Task group option': 'Task group option',
'Create task group': 'Create task group',
'Edit task group': 'Edit task group',
'Delete task group': 'Delete task group',
'Task group code': 'Task group code',
'Task group name': 'Task group name',
'Task group resource pool size': 'Resource pool size',
'Task group resource pool size be a number': 'The size of the task group resource pool should be more than 1',
'Task group resource used pool size': 'Used resource',
'Task group desc': 'Task group desc',
'Task group status': 'Task group status',
'Task group enable status': 'Enable',
'Task group disable status': 'Disable',
'Please enter task group desc': 'Please enter task group description',
'Please enter task group resource pool size': 'Please enter task group resource pool size',
'Please select project': 'Please select a project',
'Task group queue': 'Task group queue',
'Task group queue priority': 'Priority',
'Task group queue priority be a number': 'The priority of the task group queue should be a positive number',
'Task group queue force starting status': 'Starting status',
'Task group in queue': 'In queue',
'Task group queue status': 'Task status',
'View task group queue': 'View task group queue',
'Task group queue the status of waiting': 'Waiting to enter the queue',
'Task group queue the status of queuing': 'Queuing',
'Task group queue the status of releasing': 'Released',
'Modify task group queue priority': 'Edit the priority of the task group queue',
'Force to start task': 'Force to start the task',
'Priority not empty': 'The value of priority can not be empty',
'Priority must be number': 'The value of priority should be number',
'Please select task name': 'Please select a task name',
'Audit Log': 'Audit Log',
'Resource Type': 'resource type',
'All Types': 'all types',
'All Modules': 'all modules',
'All Operations': 'all operations',
'User Audit': 'user management audit',
'Project Audit': 'project management audit',
'Create Operation': 'create',
'Update Operation': 'update',
'Delete Operation': 'delete',
'Read Operation': 'read',
'Process State': 'Process State',
'Upstream Tasks': 'Upstream Tasks',
'and {n} more': '… and {n} more',
'Move task': 'Move task',
'Delete task {taskName} from process {processName}?': 'Delete task {taskName} from process {processName}?',
'Delete task completely': 'Delete task completely',
'Please select a process': 'Please select a process',
'Delete {taskName}?': 'Delete {taskName}?',
'Please select a process (required)': 'Please select a process (required)',
src_connector_type: 'SrcConnType',
src_datasource_id: 'SrcSource',
src_table: 'SrcTable',
src_filter: 'SrcFilter',
src_field: 'SrcField',
statistics_name: 'ActualValName',
check_type: 'CheckType',
operator: 'Operator',
threshold: 'Threshold',
failure_strategy: 'FailureStrategy',
target_connector_type: 'TargetConnType',
target_datasource_id: 'TargetSourceId',
target_table: 'TargetTable',
target_filter: 'TargetFilter',
mapping_columns: 'OnClause',
statistics_execute_sql: 'ActualValExecSql',
comparison_name: 'ExpectedValName',
comparison_execute_sql: 'ExpectedValExecSql',
comparison_type: 'ExpectedValType',
writer_connector_type: 'WriterConnType',
writer_datasource_id: 'WriterSourceId',
target_field: 'TargetField',
field_length: 'FieldLength',
logic_operator: 'LogicOperator',
regexp_pattern: 'RegexpPattern',
deadline: 'Deadline',
datetime_format: 'DatetimeFormat',
enum_list: 'EnumList',
begin_time: 'BeginTime',
fix_value: 'FixValue',
null_check: 'NullCheck',
custom_sql: 'CustomSql',
multi_table_accuracy: 'MulTableAccuracy',
multi_table_value_comparison: 'MulTableCompare',
field_length_check: 'FieldLengthCheck',
uniqueness_check: 'UniquenessCheck',
regexp_check: 'RegexpCheck',
timeliness_check: 'TimelinessCheck',
enumeration_check: 'EnumerationCheck',
table_count_check: 'TableCountCheck',
All: 'All',
FixValue: 'FixValue',
DailyFluctuation: 'DailyFluctuation',
WeeklyFluctuation: 'WeeklyFluctuation',
MonthlyFluctuation: 'MonthlyFluctuation',
Last7DayFluctuation: 'Last7DayFluctuation',
Last30DayFluctuation: 'Last30DayFluctuation',
SrcTableTotalRows: 'SrcTableTotalRows',
TargetTableTotalRows: 'TargetTableTotalRows'
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,284 | [Improvement][Alert] Dingtalk alert plugin supports markdown message type | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Dingtalk alert plugin supports markdown message type
### Use case
Dingtalk alert plugin supports the markdown message type, so alert content can be formatted for easier reading of the specific details.
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8284 | https://github.com/apache/dolphinscheduler/pull/8285 | 78cac085323b1a28d866ed10357c89f2122d06a3 | 36d526d3a803f2ad9203ea54d93a255663bc6eaf | "2022-02-05T12:10:47Z" | java | "2022-02-07T14:07:37Z" | dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export default {
'User Name': '用户名',
'Please enter user name': '请输入用户名',
Password: '密码',
'Please enter your password': '请输入密码',
'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': '密码至少包含数字,字母和字符的两种组合,长度在6-22之间',
Login: '登录',
Home: '首页',
'Failed to create node to save': '未创建节点保存失败',
'Global parameters': '全局参数',
'Local parameters': '局部参数',
'Copy success': '复制成功',
'The browser does not support automatic copying': '该浏览器不支持自动复制',
'Whether to save the DAG graph': '是否保存DAG图',
'Current node settings': '当前节点设置',
'View history': '查看历史',
'View log': '查看日志',
'Force success': '强制成功',
'Enter this child node': '进入该子节点',
'Node name': '节点名称',
'Please enter name (required)': '请输入名称(必填)',
'Run flag': '运行标志',
Normal: '正常',
'Prohibition execution': '禁止执行',
'Please enter description': '请输入描述',
'Number of failed retries': '失败重试次数',
Times: '次',
'Failed retry interval': '失败重试间隔',
Minute: '分',
'Delay execution time': '延时执行时间',
'Delay execution': '延时执行',
'Forced success': '强制成功',
Cancel: '取消',
'Confirm add': '确认添加',
'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': '新创建子工作流还未执行,不能进入子工作流',
'The task has not been executed and cannot enter the sub-Process': '该任务还未执行,不能进入子工作流',
'Name already exists': '名称已存在请重新输入',
'Download Log': '下载日志',
'Refresh Log': '刷新日志',
'Enter full screen': '进入全屏',
'Cancel full screen': '取消全屏',
Close: '关闭',
'Update log success': '更新日志成功',
'No more logs': '暂无更多日志',
'No log': '暂无日志',
'Loading Log...': '正在努力请求日志中...',
'Set the DAG diagram name': '设置DAG图名称',
'Please enter description(optional)': '请输入描述(选填)',
'Set global': '设置全局',
'Whether to go online the process definition': '是否上线流程定义',
'Whether to update the process definition': '是否更新流程定义',
Add: '添加',
'DAG graph name cannot be empty': 'DAG图名称不能为空',
'Create Datasource': '创建数据源',
'Project Home': '工作流监控',
'Project Manage': '项目管理',
'Create Project': '创建项目',
'Cron Manage': '定时管理',
'Copy Workflow': '复制工作流',
'Tenant Manage': '租户管理',
'Create Tenant': '创建租户',
'User Manage': '用户管理',
'Create User': '创建用户',
'User Information': '用户信息',
'Edit Password': '密码修改',
Success: '成功',
Failed: '失败',
Delete: '删除',
'Please choose': '请选择',
'Please enter a positive integer': '请输入正整数',
'Program Type': '程序类型',
'Main Class': '主函数的Class',
'Main Package': '主程序包',
'Please enter main package': '请选择主程序包',
'Please enter main class': '请填写主函数的Class',
'Main Arguments': '主程序参数',
'Please enter main arguments': '请输入主程序参数',
'Option Parameters': '选项参数',
'Please enter option parameters': '请输入选项参数',
Resources: '资源',
'Custom Parameters': '自定义参数',
'Custom template': '自定义模版',
Datasource: '数据源',
methods: '方法',
'Please enter the procedure method': '请输入存储脚本 \n\n调用存储过程:{call <procedure-name>[(<arg1>,<arg2>, ...)]}\n\n调用存储函数:{?= call <procedure-name>[(<arg1>,<arg2>, ...)]} ',
'The procedure method script example': '示例:{call <procedure-name>[(?,?, ...)]} 或 {?= call <procedure-name>[(?,?, ...)]}',
Script: '脚本',
'Please enter script(required)': '请输入脚本(必填)',
'Deploy Mode': '部署方式',
'Driver Cores': 'Driver核心数',
'Please enter Driver cores': '请输入Driver核心数',
'Driver Memory': 'Driver内存数',
'Please enter Driver memory': '请输入Driver内存数',
'Executor Number': 'Executor数量',
'Please enter Executor number': '请输入Executor数量',
'The Executor number should be a positive integer': 'Executor数量为正整数',
'Executor Memory': 'Executor内存数',
'Please enter Executor memory': '请输入Executor内存数',
'Executor Cores': 'Executor核心数',
'Please enter Executor cores': '请输入Executor核心数',
'Memory should be a positive integer': '内存数为数字',
'Core number should be positive integer': '核心数为正整数',
'Flink Version': 'Flink版本',
'JobManager Memory': 'JobManager内存数',
'Please enter JobManager memory': '请输入JobManager内存数',
'TaskManager Memory': 'TaskManager内存数',
'Please enter TaskManager memory': '请输入TaskManager内存数',
'Slot Number': 'Slot数量',
'Please enter Slot number': '请输入Slot数量',
Parallelism: '并行度',
'Custom Parallelism': '自定义并行度',
'Please enter Parallelism': '请输入并行度',
'Parallelism number should be positive integer': '并行度必须为正整数',
'Parallelism tip': '如果存在大量任务需要补数时,可以利用自定义并行度将补数的任务线程设置成合理的数值,避免对服务器造成过大的影响',
'TaskManager Number': 'TaskManager数量',
'Please enter TaskManager number': '请输入TaskManager数量',
'App Name': '任务名称',
'Please enter app name(optional)': '请输入任务名称(选填)',
'SQL Type': 'sql类型',
'Send Email': '发送邮件',
'Log display': '日志显示',
'rows of result': '行查询结果',
Title: '主题',
'Please enter the title of email': '请输入邮件主题',
Table: '表名',
TableMode: '表格',
Attachment: '附件',
'SQL Parameter': 'sql参数',
'SQL Statement': 'sql语句',
'UDF Function': 'UDF函数',
'Please enter a SQL Statement(required)': '请输入sql语句(必填)',
'Please enter a JSON Statement(required)': '请输入json语句(必填)',
'One form or attachment must be selected': '表格、附件必须勾选一个',
'Mail subject required': '邮件主题必填',
'Child Node': '子节点',
'Please select a sub-Process': '请选择子工作流',
Edit: '编辑',
'Switch To This Version': '切换到该版本',
'Datasource Name': '数据源名称',
'Please enter datasource name': '请输入数据源名称',
IP: 'IP主机名',
'Please enter IP': '请输入IP主机名',
Port: '端口',
'Please enter port': '请输入端口',
'Database Name': '数据库名',
'Please enter database name': '请输入数据库名',
'Oracle Connect Type': '服务名或SID',
'Oracle Service Name': '服务名',
'Oracle SID': 'SID',
'jdbc connect parameters': 'jdbc连接参数',
'Test Connect': '测试连接',
'Please enter resource name': '请输入数据源名称',
'Please enter resource folder name': '请输入资源文件夹名称',
'Please enter a non-query SQL statement': '请输入非查询sql语句',
'Please enter IP/hostname': '请输入IP/主机名',
'jdbc connection parameters is not a correct JSON format': 'jdbc连接参数不是一个正确的JSON格式',
'#': '编号',
'Datasource Type': '数据源类型',
'Datasource Parameter': '数据源参数',
'Create Time': '创建时间',
'Update Time': '更新时间',
Operation: '操作',
'Current Version': '当前版本',
'Click to view': '点击查看',
'Delete?': '确定删除吗?',
'Switch Version Successfully': '切换版本成功',
'Confirm Switch To This Version?': '确定切换到该版本吗?',
Confirm: '确定',
'Task status statistics': '任务状态统计',
Number: '数量',
State: '状态',
'Dry-run flag': '空跑标识',
'Process Status Statistics': '流程状态统计',
'Process Definition Statistics': '流程定义统计',
'Project Name': '项目名称',
'Please enter name': '请输入名称',
'Owned Users': '所属用户',
'Process Pid': '进程Pid',
'Zk registration directory': 'zk注册目录',
cpuUsage: 'cpuUsage',
memoryUsage: 'memoryUsage',
'Last heartbeat time': '最后心跳时间',
'Edit Tenant': '编辑租户',
'OS Tenant Code': '操作系统租户',
'Tenant Name': '租户名称',
Queue: '队列',
'Please select a queue': '默认为租户关联队列',
'Please enter the os tenant code in English': '请输入操作系统租户只允许英文',
'Please enter os tenant code in English': '请输入英文操作系统租户',
'Please enter os tenant code': '请输入操作系统租户',
'Please enter tenant Name': '请输入租户名称',
'The os tenant code. Only letters or a combination of letters and numbers are allowed': '操作系统租户只允许字母或字母与数字组合',
'Edit User': '编辑用户',
Tenant: '租户',
Email: '邮件',
Phone: '手机',
'User Type': '用户类型',
'Please enter phone number': '请输入手机',
'Please enter email': '请输入邮箱',
'Please enter the correct email format': '请输入正确的邮箱格式',
'Please enter the correct mobile phone format': '请输入正确的手机格式',
Project: '项目',
Authorize: '授权',
'File resources': '文件资源',
'UDF resources': 'UDF资源',
'UDF resources directory': 'UDF资源目录',
'Please select UDF resources directory': '请选择UDF资源目录',
'Alarm group': '告警组',
'Alarm group required': '告警组必填',
'Edit alarm group': '编辑告警组',
'Create alarm group': '创建告警组',
'Create Alarm Instance': '创建告警实例',
'Edit Alarm Instance': '编辑告警实例',
'Group Name': '组名称',
'Alarm instance name': '告警实例名称',
'Alarm plugin name': '告警插件名称',
'Select plugin': '选择插件',
'Select Alarm plugin': '请选择告警插件',
'Please enter group name': '请输入组名称',
'Instance parameter exception': '实例参数异常',
'Group Type': '组类型',
'Alarm plugin instance': '告警插件实例',
'Please enter alarm plugin instance name': '请输入告警实例名称',
'Select Alarm plugin instance': '请选择告警插件实例',
Remarks: '备注',
SMS: '短信',
'Managing Users': '管理用户',
Permission: '权限',
Administrator: '管理员',
'Confirm Password': '确认密码',
'Please enter confirm password': '请输入确认密码',
'Password cannot be in Chinese': '密码不能为中文',
'Please enter a password (6-22) character password': '请输入密码(6-22)字符密码',
'Confirmation password cannot be in Chinese': '确认密码不能为中文',
'Please enter a confirmation password (6-22) character password': '请输入确认密码(6-22)字符密码',
'The password is inconsistent with the confirmation password': '密码与确认密码不一致,请重新确认',
'Please select the datasource': '请选择数据源',
'Please select resources': '请选择资源',
Query: '查询',
'Non Query': '非查询',
'prop(required)': 'prop(必填)',
'value(optional)': 'value(选填)',
'value(required)': 'value(必填)',
'prop is empty': '自定义参数prop不能为空',
'value is empty': 'value不能为空',
'prop is repeat': 'prop中有重复',
'Start Time': '开始时间',
'End Time': '结束时间',
crontab: 'crontab',
'Failure Strategy': '失败策略',
online: '上线',
offline: '下线',
'Task Status': '任务状态',
'Process Instance': '工作流实例',
'Task Instance': '任务实例',
'Select date range': '选择日期区间',
startDate: '开始日期',
endDate: '结束日期',
Date: '日期',
Waiting: '等待',
Execution: '执行中',
Finish: '完成',
'Create File': '创建文件',
'Create folder': '创建文件夹',
'File Name': '文件名称',
'Folder Name': '文件夹名称',
'File Format': '文件格式',
'Folder Format': '文件夹格式',
'File Content': '文件内容',
'Upload File Size': '文件大小不能超过1G',
Create: '创建',
'Please enter the resource content': '请输入资源内容',
'Resource content cannot exceed 3000 lines': '资源内容不能超过3000行',
'File Details': '文件详情',
'Download Details': '下载详情',
Return: '返回',
Save: '保存',
'File Manage': '文件管理',
'Upload Files': '上传文件',
'Create UDF Function': '创建UDF函数',
'Upload UDF Resources': '上传UDF资源',
'Service-Master': '服务管理-Master',
'Service-Worker': '服务管理-Worker',
'Process Name': '工作流名称',
Executor: '执行用户',
'Run Type': '运行类型',
'Scheduling Time': '调度时间',
'Run Times': '运行次数',
host: 'host',
'fault-tolerant sign': '容错标识',
Rerun: '重跑',
'Recovery Failed': '恢复失败',
Stop: '停止',
Pause: '暂停',
'Recovery Suspend': '恢复运行',
Gantt: '甘特图',
'Node Type': '节点类型',
'Submit Time': '提交时间',
Duration: '运行时长',
'Retry Count': '重试次数',
'Task Name': '任务名称',
'Task Date': '任务日期',
'Source Table': '源表',
'Record Number': '记录数',
'Target Table': '目标表',
'Online viewing type is not supported': '不支持在线查看类型',
Size: '大小',
Rename: '重命名',
Download: '下载',
Export: '导出',
'Version Info': '版本信息',
Submit: '提交',
'Edit UDF Function': '编辑UDF函数',
type: '类型',
'UDF Function Name': 'UDF函数名称',
FILE: '文件',
UDF: 'UDF',
'File Subdirectory': '文件子目录',
'Please enter a function name': '请输入函数名',
'Package Name': '包名类名',
'Please enter a Package name': '请输入包名类名',
Parameter: '参数',
'Please enter a parameter': '请输入参数',
'UDF Resources': 'UDF资源',
'Upload Resources': '上传资源',
Instructions: '使用说明',
'Please enter a instructions': '请输入使用说明',
'Please enter a UDF function name': '请输入UDF函数名称',
'Select UDF Resources': '请选择UDF资源',
'Class Name': '类名',
'Jar Package': 'jar包',
'Library Name': '库名',
'UDF Resource Name': 'UDF资源名称',
'File Size': '文件大小',
Description: '描述',
'Drag Nodes and Selected Items': '拖动节点和选中项',
'Select Line Connection': '选择线条连接',
'Delete selected lines or nodes': '删除选中的线或节点',
'Full Screen': '全屏',
Unpublished: '未发布',
'Start Process': '启动工作流',
'Execute from the current node': '从当前节点开始执行',
'Recover tolerance fault process': '恢复被容错的工作流',
'Resume the suspension process': '恢复运行流程',
'Execute from the failed nodes': '从失败节点开始执行',
'Complement Data': '补数',
'Scheduling execution': '调度执行',
'Recovery waiting thread': '恢复等待线程',
'Submitted successfully': '提交成功',
Executing: '正在执行',
'Ready to pause': '准备暂停',
'Ready to stop': '准备停止',
'Need fault tolerance': '需要容错',
Kill: 'Kill',
'Waiting for thread': '等待线程',
'Waiting for dependence': '等待依赖',
Start: '运行',
Copy: '复制节点',
'Copy name': '复制名称',
'Copy path': '复制路径',
'Please enter keyword': '请输入关键词',
'File Upload': '文件上传',
'Drag the file into the current upload window': '请将文件拖拽到当前上传窗口内!',
'Drag area upload': '拖动区域上传',
Upload: '上传',
'ReUpload File': '重新上传文件',
'Please enter file name': '请输入文件名',
'Please select the file to upload': '请选择要上传的文件',
'Resources manage': '资源中心',
Security: '安全中心',
Logout: '退出',
'No data': '查询无数据',
'Uploading...': '文件上传中',
'Loading...': '正在努力加载中...',
List: '列表',
'Unable to download without proper url': '无下载url无法下载',
Process: '工作流',
'Process definition': '工作流定义',
'Task record': '任务记录',
'Warning group manage': '告警组管理',
'Warning instance manage': '告警实例管理',
'Servers manage': '服务管理',
'UDF manage': 'UDF管理',
'Resource manage': '资源管理',
'Function manage': '函数管理',
'Edit password': '修改密码',
'Ordinary users': '普通用户',
'Create process': '创建工作流',
'Import process': '导入工作流',
'Timing state': '定时状态',
Timing: '定时',
Timezone: '时区',
TreeView: '树形图',
'Mailbox already exists! Recipients and copyers cannot repeat': '邮箱已存在!收件人和抄送人不能重复',
'Mailbox input is illegal': '邮箱输入不合法',
'Please set the parameters before starting': '启动前请先设置参数',
Continue: '继续',
End: '结束',
'Node execution': '节点执行',
'Backward execution': '向后执行',
'Forward execution': '向前执行',
'Execute only the current node': '仅执行当前节点',
'Notification strategy': '通知策略',
'Notification group': '通知组',
'Please select a notification group': '请选择通知组',
'Whether it is a complement process?': '是否补数',
'Schedule date': '调度日期',
'Mode of execution': '执行方式',
'Serial execution': '串行执行',
'Parallel execution': '并行执行',
'Set parameters before timing': '定时前请先设置参数',
'Start and stop time': '起止时间',
'Please select time': '请选择时间',
'Please enter crontab': '请输入crontab',
none_1: '都不发',
success_1: '成功发',
failure_1: '失败发',
All_1: '成功或失败都发',
Toolbar: '工具栏',
'View variables': '查看变量',
'Format DAG': '格式化DAG',
'Refresh DAG status': '刷新DAG状态',
Return_1: '返回上一节点',
'Please enter format': '请输入格式为',
'connection parameter': '连接参数',
'Process definition details': '流程定义详情',
'Create process definition': '创建流程定义',
'Scheduled task list': '定时任务列表',
'Process instance details': '流程实例详情',
'Create Resource': '创建资源',
'User Center': '用户中心',
AllStatus: '全部状态',
None: '无',
Name: '名称',
'Process priority': '流程优先级',
'Task priority': '任务优先级',
'Task timeout alarm': '任务超时告警',
'Timeout strategy': '超时策略',
'Timeout alarm': '超时告警',
'Timeout failure': '超时失败',
'Timeout period': '超时时长',
'Waiting Dependent complete': '等待依赖完成',
'Waiting Dependent start': '等待依赖启动',
'Check interval': '检查间隔',
'Timeout must be longer than check interval': '超时时间必须比检查间隔长',
'Timeout strategy must be selected': '超时策略必须选一个',
'Timeout must be a positive integer': '超时时长必须为正整数',
'Add dependency': '添加依赖',
'Whether dry-run': '是否空跑',
and: '且',
or: '或',
month: '月',
week: '周',
day: '日',
hour: '时',
Running: '正在运行',
'Waiting for dependency to complete': '等待依赖完成',
Selected: '已选',
CurrentHour: '当前小时',
Last1Hour: '前1小时',
Last2Hours: '前2小时',
Last3Hours: '前3小时',
Last24Hours: '前24小时',
today: '今天',
Last1Days: '昨天',
Last2Days: '前两天',
Last3Days: '前三天',
Last7Days: '前七天',
ThisWeek: '本周',
LastWeek: '上周',
LastMonday: '上周一',
LastTuesday: '上周二',
LastWednesday: '上周三',
LastThursday: '上周四',
LastFriday: '上周五',
LastSaturday: '上周六',
LastSunday: '上周日',
ThisMonth: '本月',
LastMonth: '上月',
LastMonthBegin: '上月初',
LastMonthEnd: '上月末',
'Refresh status succeeded': '刷新状态成功',
'Queue manage': 'Yarn 队列管理',
'Create queue': '创建队列',
'Edit queue': '编辑队列',
'Datasource manage': '数据源中心',
'History task record': '历史任务记录',
'Please go online': '不要忘记上线',
'Queue value': '队列值',
'Please enter queue value': '请输入队列值',
'Worker group manage': 'Worker分组管理',
'Create worker group': '创建Worker分组',
'Edit worker group': '编辑Worker分组',
'Token manage': '令牌管理',
'Create token': '创建令牌',
'Edit token': '编辑令牌',
Addresses: '地址',
'Worker Addresses': 'Worker地址',
'Please select the worker addresses': '请选择Worker地址',
'Failure time': '失效时间',
'Expiration time': '失效时间',
User: '用户',
'Please enter token': '请输入令牌',
'Generate token': '生成令牌',
Monitor: '监控中心',
Group: '分组',
'Queue statistics': '队列统计',
'Command status statistics': '命令状态统计',
'Task kill': '等待kill任务',
'Task queue': '等待执行任务',
'Error command count': '错误指令数',
'Normal command count': '正确指令数',
Manage: '管理',
'Number of connections': '连接数',
Sent: '发送量',
Received: '接收量',
'Min latency': '最低延时',
'Avg latency': '平均延时',
'Max latency': '最大延时',
'Node count': '节点数',
'Query time': '当前查询时间',
'Node self-test status': '节点自检状态',
'Health status': '健康状态',
'Max connections': '最大连接数',
'Threads connections': '当前连接数',
'Max used connections': '同时使用连接最大数',
'Threads running connections': '数据库当前活跃连接数',
'Worker group': 'Worker分组',
'Please enter a positive integer greater than 0': '请输入大于 0 的正整数',
'Pre Statement': '前置sql',
'Post Statement': '后置sql',
'Statement cannot be empty': '语句不能为空',
'Process Define Count': '工作流定义数',
'Process Instance Running Count': '正在运行的流程数',
'command number of waiting for running': '待执行的命令数',
'failure command number': '执行失败的命令数',
'tasks number of waiting running': '待运行任务数',
'task number of ready to kill': '待杀死任务数',
'Statistics manage': '统计管理',
statistics: '统计',
'select tenant': '选择租户',
'Please enter Principal': '请输入Principal',
'Please enter the kerberos authentication parameter java.security.krb5.conf': '请输入kerberos认证参数 java.security.krb5.conf',
'Please enter the kerberos authentication parameter login.user.keytab.username': '请输入kerberos认证参数 login.user.keytab.username',
'Please enter the kerberos authentication parameter login.user.keytab.path': '请输入kerberos认证参数 login.user.keytab.path',
'The start time must not be the same as the end': '开始时间和结束时间不能相同',
'Startup parameter': '启动参数',
'Startup type': '启动类型',
'warning of timeout': '超时告警',
'Next five execution times': '接下来五次执行时间',
'Execute time': '执行时间',
'Complement range': '补数范围',
'Http Url': '请求地址',
'Http Method': '请求类型',
'Http Parameters': '请求参数',
'Http Parameters Key': '参数名',
'Http Parameters Position': '参数位置',
'Http Parameters Value': '参数值',
'Http Check Condition': '校验条件',
'Http Condition': '校验内容',
'Please Enter Http Url': '请填写请求地址(必填)',
'Please Enter Http Condition': '请填写校验内容',
'There is no data for this period of time': '该时间段无数据',
'Worker addresses cannot be empty': 'Worker地址不能为空',
'Please generate token': '请生成Token',
'Please Select token': '请选择Token失效时间',
'Spark Version': 'Spark版本',
TargetDataBase: '目标库',
TargetTable: '目标表',
TargetJobName: '目标任务名',
'Please enter Pigeon job name': '请输入Pigeon任务名',
'Please enter the table of target': '请输入目标表名',
'Please enter a Target Table(required)': '请输入目标表(必填)',
SpeedByte: '限流(字节数)',
SpeedRecord: '限流(记录数)',
'0 means unlimited by byte': 'KB,0代表不限制',
'0 means unlimited by count': '0代表不限制',
'Modify User': '修改用户',
'Whether directory': '是否文件夹',
Yes: '是',
No: '否',
'Hadoop Custom Params': 'Hadoop参数',
'Sqoop Advanced Parameters': 'Sqoop参数',
'Sqoop Job Name': '任务名称',
'Please enter Mysql Database(required)': '请输入Mysql数据库(必填)',
'Please enter Mysql Table(required)': '请输入Mysql表名(必填)',
'Please enter Columns (Comma separated)': '请输入列名,用 , 隔开',
'Please enter Target Dir(required)': '请输入目标路径(必填)',
'Please enter Export Dir(required)': '请输入数据源路径(必填)',
'Please enter Hive Database(required)': '请输入Hive数据库(必填)',
'Please enter Hive Table(required)': '请输入Hive表名(必填)',
'Please enter hive target dir': '请输入Hive临时目录',
'Please enter Hive Partition Keys': '请输入分区键',
'Please enter Hive Partition Values': '请输入分区值',
'Please enter Replace Delimiter': '请输入替换分隔符',
'Please enter Fields Terminated': '请输入列分隔符',
'Please enter Lines Terminated': '请输入行分隔符',
'Please enter Concurrency': '请输入并发度',
'Please enter Update Key': '请输入更新列',
'Please enter Job Name(required)': '请输入任务名称(必填)',
'Please enter Custom Shell(required)': '请输入自定义脚本',
Direct: '流向',
Type: '类型',
ModelType: '模式',
ColumnType: '列类型',
Database: '数据库',
Column: '列',
'Map Column Hive': 'Hive类型映射',
'Map Column Java': 'Java类型映射',
'Export Dir': '数据源路径',
'Hive partition Keys': 'Hive 分区键',
'Hive partition Values': 'Hive 分区值',
FieldsTerminated: '列分隔符',
LinesTerminated: '行分隔符',
IsUpdate: '是否更新',
UpdateKey: '更新列',
UpdateMode: '更新类型',
'Target Dir': '目标路径',
DeleteTargetDir: '是否删除目录',
FileType: '保存格式',
CompressionCodec: '压缩类型',
CreateHiveTable: '是否创建新表',
DropDelimiter: '是否删除分隔符',
OverWriteSrc: '是否覆盖数据源',
ReplaceDelimiter: '替换分隔符',
Concurrency: '并发度',
Form: '表单',
OnlyUpdate: '只更新',
AllowInsert: '无更新便插入',
'Data Source': '数据来源',
'Data Target': '数据目的',
'All Columns': '全表导入',
'Some Columns': '选择列',
'Branch flow': '分支流转',
'Custom Job': '自定义任务',
'Custom Script': '自定义脚本',
'Cannot select the same node for successful branch flow and failed branch flow': '成功分支流转和失败分支流转不能选择同一个节点',
'Successful branch flow and failed branch flow are required': 'conditions节点成功和失败分支流转必填',
'No resources exist': '不存在资源',
'Please delete all non-existing resources': '请删除所有不存在资源',
'Unauthorized or deleted resources': '未授权或已删除资源',
'Please delete all non-existent resources': '请删除所有未授权或已删除资源',
Kinship: '工作流关系',
Reset: '重置',
KinshipStateActive: '当前选择',
KinshipState1: '已上线',
KinshipState0: '工作流未上线',
KinshipState10: '调度未上线',
'Dag label display control': 'Dag节点名称显隐',
Enable: '启用',
Disable: '停用',
'The Worker group no longer exists, please select the correct Worker group!': '该Worker分组已经不存在,请选择正确的Worker分组!',
'Please confirm whether the workflow has been saved before downloading': '下载前请确定工作流是否已保存',
'User name length is between 3 and 39': '用户名长度在3~39之间',
'Timeout Settings': '超时设置',
'Connect Timeout': '连接超时',
'Socket Timeout': 'Socket超时',
'Connect timeout be a positive integer': '连接超时必须为数字',
'Socket Timeout be a positive integer': 'Socket超时必须为数字',
ms: '毫秒',
'Please Enter Url': '请直接填写地址,例如:127.0.0.1:7077',
Master: 'Master',
'Please select the seatunnel resources': '请选择 seatunnel 配置文件',
zkDirectory: 'zk注册目录',
'Directory detail': '查看目录详情',
'Connection name': '连线名',
'Current connection settings': '当前连线设置',
'Please save the DAG before formatting': '格式化前请先保存DAG',
'Batch copy': '批量复制',
'Related items': '关联项目',
'Project name is required': '项目名称必填',
'Batch move': '批量移动',
Version: '版本',
'Pre tasks': '前置任务',
'Running Memory': '运行内存',
'Max Memory': '最大内存',
'Min Memory': '最小内存',
'The workflow canvas is abnormal and cannot be saved, please recreate': '该工作流画布异常,无法保存,请重新创建',
Info: '提示',
'Datasource userName': '所属用户',
'Resource userName': '所属用户',
DataQuality: '数据质量',
'DataQuality Result': '任务结果',
'DataQuality Rule': '规则管理',
'DataQuality Rule Json': '规则定义',
'Rule Name': '规则名称',
Default: '未完成',
Failure: '失败',
'Rule Type': '规则类型',
'Statistics Value': '实际值',
'Comparison Value': '期望值',
'Comparison Type': '期望值类型',
'Check Type': '检测类型',
Operator: '操作符',
Threshold: '阈值',
'Single Table': '单表检测',
'Single Table Custom Sql': '自定义SQL',
'Multi Table Accuracy': '多表准确性',
'Multi Table Comparison': '两表值对比',
  '(Expected - Actual) / Expected': '(期望值-实际值)/期望值 x 100%',
'Expected - Actual': '期望值-实际值',
'Actual - Expected': '实际值-期望值',
'Actual / Expected': '实际值/期望值 x 100%',
'InputEntry Field': '输入项占位符',
'InputEntry Type': '输入项类型',
'InputEntry Title': '输入项标题',
'Error Output Path': '错误数据路径',
Alert: '告警',
Block: '阻断',
'Environment manage': '环境管理',
'Create environment': '创建环境',
  'Edit environment': '编辑环境',
  'Environment value': '环境值',
'Environment Name': '环境名称',
'Environment Code': '环境编码',
'Environment Config': '环境配置',
'Environment Desc': '详细描述',
'Environment Worker Group': 'Worker组',
'Please enter environment config': '请输入环境配置信息',
'Please enter environment desc': '请输入详细描述',
'Please select worker groups': '请选择Worker分组',
condition: '条件',
'The condition content cannot be empty': '条件内容不能为空',
'Reference from': '使用已有任务',
'No more...': '没有更多了...',
'Task Definition': '任务定义',
'Create task': '创建任务',
'Task Type': '任务类型',
'Process execute type': '执行策略',
parallel: '并行',
'Serial wait': '串行等待',
'Serial discard': '串行抛弃',
'Serial priority': '串行优先',
'Recover serial wait': '串行恢复',
IsEnableProxy: '启用代理',
WebHook: 'Web钩子',
webHook: 'Web钩子',
Keyword: '关键词',
Secret: '密钥',
AtMobiles: '@手机号',
AtUserIds: '@用户ID',
IsAtAll: '@所有人',
Proxy: '代理',
receivers: '收件人',
receiverCcs: '抄送人',
transportProtocol: '邮件协议',
serverHost: 'SMTP服务器',
serverPort: 'SMTP端口',
sender: '发件人',
enableSmtpAuth: '请求认证',
starttlsEnable: 'STARTTLS连接',
sslEnable: 'SSL连接',
smtpSslTrust: 'SSL证书信任',
url: 'URL',
requestType: '请求方式',
headerParams: '请求头',
bodyParams: '请求体',
contentField: '内容字段',
path: '脚本路径',
userParams: '自定义参数',
corpId: '企业ID',
secret: '密钥',
teamSendMsg: '群发信息',
userSendMsg: '群员信息',
agentId: '应用ID',
users: '群员',
Username: '用户名',
username: '用户名',
showType: '内容展示类型',
'Please select a task type (required)': '请选择任务类型(必选)',
layoutType: '布局类型',
gridLayout: '网格布局',
dagreLayout: '层次布局',
rows: '行数',
cols: '列数',
processOnline: '已上线',
searchNode: '搜索节点',
dagScale: '缩放',
workflowName: '工作流名称',
scheduleStartTime: '定时开始时间',
scheduleEndTime: '定时结束时间',
crontabExpression: 'Crontab',
workflowPublishStatus: '工作流上线状态',
schedulePublishStatus: '定时状态',
'Task group manage': '任务组管理',
'Task group option': '任务组配置',
'Create task group': '创建任务组',
'Edit task group': '编辑任务组',
'Delete task group': '删除任务组',
'Task group code': '任务组编号',
'Task group name': '任务组名称',
'Task group resource pool size': '资源容量',
'Task group resource used pool size': '已用资源',
'Task group desc': '描述信息',
'Task group status': '任务组状态',
'Task group enable status': '启用',
'Task group disable status': '不可用',
'Please enter task group desc': '请输入任务组描述',
'Please enter task group resource pool size': '请输入资源容量大小',
'Task group resource pool size be a number': '资源容量大小必须大于等于1的数值',
'Please select project': '请选择项目',
'Task group queue': '任务组队列',
'Task group queue priority': '组内优先级',
'Task group queue priority be a number': '优先级必须是大于等于0的数值',
'Task group queue force starting status': '是否强制启动',
'Task group in queue': '是否排队中',
'Task group queue status': '任务状态',
'View task group queue': '查看任务组队列',
'Task group queue the status of waiting': '等待入队',
'Task group queue the status of queuing': '排队中',
'Task group queue the status of releasing': '已释放',
'Modify task group queue priority': '修改优先级',
'Force to start task': '强制启动',
'Priority not empty': '优先级不能为空',
'Priority must be number': '优先级必须是数值',
'Please select task name': '请选择节点名称',
'Audit Log': '审计日志',
'Resource Type': '资源类型',
'All Types': '所有类型',
'All Operations': '所有操作',
'User Audit': '用户管理审计',
'Project Audit': '项目管理审计',
'Create Operation': '创建',
'Update Operation': '更新',
'Delete Operation': '删除',
'Read Operation': '读取',
'Process State': '工作流状态',
'Upstream Tasks': '上游任务',
'and {n} more': '…等{n}个',
'Move task': '移动任务',
'Delete task {taskName} from process {processName}?': '将任务 {taskName} 从工作流 {processName} 中删除?',
'Delete task completely': '彻底删除任务',
'Please select a process': '请选择工作流',
'Delete {taskName}?': '确定删除 {taskName} ?',
'Please select a process (required)': '请选择工作流(必选)',
src_connector_type: '源数据类型',
src_datasource_id: '源数据源',
src_table: '源数据表',
src_filter: '源表过滤条件',
src_field: '源表检测列',
statistics_name: '实际值名',
check_type: '校验方式',
operator: '校验操作符',
threshold: '阈值',
failure_strategy: '失败策略',
target_connector_type: '目标数据类型',
target_datasource_id: '目标数据源',
target_table: '目标数据表',
target_filter: '目标表过滤条件',
mapping_columns: 'ON语句',
statistics_execute_sql: '实际值计算SQL',
comparison_name: '期望值名',
comparison_execute_sql: '期望值计算SQL',
comparison_type: '期望值类型',
writer_connector_type: '输出数据类型',
writer_datasource_id: '输出数据源',
target_field: '目标表检测列',
field_length: '字段长度限制',
logic_operator: '逻辑操作符',
regexp_pattern: '正则表达式',
deadline: '截止时间',
datetime_format: '时间格式',
enum_list: '枚举值列表',
begin_time: '起始时间',
fix_value: '固定值',
null_check: '空值检测',
custom_sql: '自定义SQL',
multi_table_accuracy: '多表准确性',
multi_table_value_comparison: '两表值比对',
field_length_check: '字段长度校验',
uniqueness_check: '唯一性校验',
regexp_check: '正则表达式',
timeliness_check: '及时性校验',
enumeration_check: '枚举值校验',
table_count_check: '表行数校验',
All: '全部',
FixValue: '固定值',
DailyFluctuation: '日波动',
WeeklyFluctuation: '周波动',
MonthlyFluctuation: '月波动',
Last7DayFluctuation: '最近7天波动',
Last30DayFluctuation: '最近30天波动',
SrcTableTotalRows: '源表总行数',
TargetTableTotalRows: '目标表总行数'
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,303 | [Bug] [DAO] There're a few syntax errors in the file of DqRuleInputEntryMapper.xml. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
When I use PostgreSQL as the database and view the rule management page in the data quality module, the process throws an exception about a syntax error.
The exception details are as follows:
```
[ERROR] 2022-02-08 11:34:47.145 org.apache.dolphinscheduler.api.exceptions.ApiExceptionHandler:[46] - 获取规则分页列表错误
org.springframework.jdbc.BadSqlGrammarException:
### Error querying database. Cause: org.postgresql.util.PSQLException: ERROR: syntax error at or near "`"
位置:55
### The error may exist in file [/Users/calvin/MyCloud/zhilian/workspace/dolphinscheduler/dolphinscheduler-dao/target/classes/org/apache/dolphinscheduler/dao/mapper/DqRuleInputEntryMapper.xml]
### The error may involve defaultParameterMap
### The error occurred while setting parameters
### SQL: SELECT a.id, a.field, a.`type`, a.title, a.value, a.`options`, a.placeholder, a.option_source_type, a.value_type, a.input_type, a.is_show, a.can_edit, a.is_emit, a.is_validate, b.values_map, b.index FROM t_ds_dq_rule_input_entry a join ( SELECT * FROM t_ds_relation_rule_input_entry where rule_id = ? ) b on a.id = b.rule_input_entry_id order by b.index
### Cause: org.postgresql.util.PSQLException: ERROR: syntax error at or near "`"
位置:55
; bad SQL grammar []; nested exception is org.postgresql.util.PSQLException: ERROR: syntax error at or near "`"
位置:55
at org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator.doTranslate(SQLErrorCodeSQLExceptionTranslator.java:239)
at org.springframework.jdbc.support.AbstractFallbackSQLExceptionTranslator.translate(AbstractFallbackSQLExceptionTranslator.java:70)
at org.mybatis.spring.MyBatisExceptionTranslator.translateExceptionIfPossible(MyBatisExceptionTranslator.java:74)
at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:440)
at com.sun.proxy.$Proxy120.selectList(Unknown Source)
at org.mybatis.spring.SqlSessionTemplate.selectList(SqlSessionTemplate.java:223)
at com.baomidou.mybatisplus.core.override.MybatisMapperMethod.executeForMany(MybatisMapperMethod.java:158)
at com.baomidou.mybatisplus.core.override.MybatisMapperMethod.execute(MybatisMapperMethod.java:76)
at com.baomidou.mybatisplus.core.override.MybatisMapperProxy.invoke(MybatisMapperProxy.java:61)
at com.sun.proxy.$Proxy159.getRuleInputEntryList(Unknown Source)
at org.apache.dolphinscheduler.api.service.impl.DqRuleServiceImpl.lambda$queryRuleListPaging$0(DqRuleServiceImpl.java:195)
at java.util.ArrayList.forEach(ArrayList.java:1257)
at org.apache.dolphinscheduler.api.service.impl.DqRuleServiceImpl.queryRuleListPaging(DqRuleServiceImpl.java:193)
at org.apache.dolphinscheduler.api.controller.DataQualityController.queryRuleListPaging(DataQualityController.java:116)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:205)
at org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:150)
at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:117)
at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:895)
at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:808)
at org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:87)
at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1067)
at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:963)
at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:1006)
at org.springframework.web.servlet.FrameworkServlet.doGet(FrameworkServlet.java:898)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:497)
at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:883)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:584)
at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:799)
at org.eclipse.jetty.servlet.ServletHandler$ChainEnd.doFilter(ServletHandler.java:1631)
at com.github.xiaoymin.swaggerbootstrapui.filter.SecurityBasicAuthFilter.doFilter(SecurityBasicAuthFilter.java:84)
at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193)
at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1601)
at com.github.xiaoymin.swaggerbootstrapui.filter.ProductionSecurityFilter.doFilter(ProductionSecurityFilter.java:53)
at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193)
at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1601)
at org.springframework.web.filter.CorsFilter.doFilterInternal(CorsFilter.java:91)
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119)
at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193)
at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1601)
at org.springframework.web.filter.RequestContextFilter.doFilterInternal(RequestContextFilter.java:100)
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119)
at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193)
at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1601)
at org.springframework.web.filter.FormContentFilter.doFilterInternal(FormContentFilter.java:93)
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119)
at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193)
at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1601)
at org.springframework.boot.actuate.metrics.web.servlet.WebMvcMetricsFilter.doFilterInternal(WebMvcMetricsFilter.java:96)
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119)
at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193)
at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1601)
at org.springframework.web.filter.CharacterEncodingFilter.doFilterInternal(CharacterEncodingFilter.java:201)
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119)
at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193)
at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1601)
at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:548)
at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:143)
at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:600)
at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:235)
at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1624)
at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1434)
at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188)
at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:501)
at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1594)
at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186)
at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1349)
at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141)
at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:763)
at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
at org.eclipse.jetty.server.Server.handle(Server.java:516)
at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:400)
at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:645)
at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:392)
at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:277)
at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311)
at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105)
at org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104)
at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:883)
at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:1034)
at java.lang.Thread.run(Thread.java:748)
Caused by: org.postgresql.util.PSQLException: ERROR: syntax error at or near "`"
位置:55
at org.postgresql.core.v3.QueryExecutorImpl.receiveErrorResponse(QueryExecutorImpl.java:2440)
at org.postgresql.core.v3.QueryExecutorImpl.processResults(QueryExecutorImpl.java:2183)
at org.postgresql.core.v3.QueryExecutorImpl.execute(QueryExecutorImpl.java:308)
at org.postgresql.jdbc.PgStatement.executeInternal(PgStatement.java:441)
at org.postgresql.jdbc.PgStatement.execute(PgStatement.java:365)
at org.postgresql.jdbc.PgPreparedStatement.executeWithFlags(PgPreparedStatement.java:143)
at org.postgresql.jdbc.PgPreparedStatement.execute(PgPreparedStatement.java:132)
at com.zaxxer.hikari.pool.ProxyPreparedStatement.execute(ProxyPreparedStatement.java:44)
at com.zaxxer.hikari.pool.HikariProxyPreparedStatement.execute(HikariProxyPreparedStatement.java)
at org.apache.ibatis.executor.statement.PreparedStatementHandler.query(PreparedStatementHandler.java:64)
at org.apache.ibatis.executor.statement.RoutingStatementHandler.query(RoutingStatementHandler.java:79)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.ibatis.plugin.Plugin.invoke(Plugin.java:63)
at com.sun.proxy.$Proxy230.query(Unknown Source)
at com.baomidou.mybatisplus.core.executor.MybatisSimpleExecutor.doQuery(MybatisSimpleExecutor.java:67)
at org.apache.ibatis.executor.BaseExecutor.queryFromDatabase(BaseExecutor.java:324)
at org.apache.ibatis.executor.BaseExecutor.query(BaseExecutor.java:156)
at org.apache.ibatis.executor.BaseExecutor.query(BaseExecutor.java:136)
at org.apache.ibatis.session.defaults.DefaultSqlSession.selectList(DefaultSqlSession.java:147)
at org.apache.ibatis.session.defaults.DefaultSqlSession.selectList(DefaultSqlSession.java:140)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:426)
... 82 common frames omitted
```
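
The error comes from the MySQL-style backtick quoting of the `type` and `options` columns in the mapper's SELECT, which PostgreSQL does not accept. A minimal portable rewrite (a sketch of one possible fix; the exact change merged in the linked PR may differ) simply drops the backticks: neither `type` nor `options` is a reserved word that needs quoting in MySQL or PostgreSQL, and `b.index` is already legal in both dialects because a word following the table qualifier's dot needs no quoting.

```sql
-- Portable form of the failing query: no vendor-specific quoting
SELECT a.id, a.field, a.type, a.title, a.value, a.options,
       a.placeholder, a.option_source_type, a.value_type, a.input_type,
       a.is_show, a.can_edit, a.is_emit, a.is_validate,
       b.values_map, b.index
FROM t_ds_dq_rule_input_entry a
JOIN (SELECT * FROM t_ds_relation_rule_input_entry WHERE rule_id = ?) b
    ON a.id = b.rule_input_entry_id
ORDER BY b.index
```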
### What you expected to happen
I expect to be able to view the rule management page correctly.
### How to reproduce
You can reproduce this by using PostgreSQL as the database and browsing the rule management page.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8303 | https://github.com/apache/dolphinscheduler/pull/8306 | f93dceaf9b951f2925b9cafa2f68796e8734e9ff | 5f25d31a26855024d6765d91cb64e6e320cb47c2 | "2022-02-08T05:07:26Z" | java | "2022-02-08T07:32:58Z" | dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqRuleInputEntryMapper.xml | <?xml version="1.0" encoding="UTF-8" ?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
<mapper namespace="org.apache.dolphinscheduler.dao.mapper.DqRuleInputEntryMapper">
<select id="getRuleInputEntryList" resultType="org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry">
SELECT a.id,
a.field,
            a.type,
a.title,
a.value,
            a.options,
a.placeholder,
a.option_source_type,
a.value_type,
a.input_type,
a.is_show,
a.can_edit,
a.is_emit,
a.is_validate,
b.values_map,
b.index
FROM t_ds_dq_rule_input_entry a join ( SELECT *
FROM t_ds_relation_rule_input_entry where rule_id = #{ruleId} ) b
on a.id = b.rule_input_entry_id order by b.index
</select>
</mapper>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,181 | [Bug] [UI] There is no obvious difference between a disabled task and a normal task, and it is not easy to identify | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
There is no obvious difference between a disabled task and a normal task, so disabled tasks are hard to identify at a glance.
Disabled tasks could be distinguished by adding a special visual identifier, as sketched below.
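
A minimal sketch of one way to do this (assuming the task's `flag` field is `'NO'` for disabled tasks, as in the saved task definition JSON; the merged fix may use a different mechanism) is to dim the node on the X6 canvas and record the state in its data:

```js
// Hypothetical helper for the DAG canvas: visually mark disabled tasks
function applyDisabledStyle (graph, task) {
  const node = graph.getCellById(String(task.code))
  if (!node || task.flag !== 'NO') return
  node.attr('image/opacity', 0.4) // fade the task icon
  node.attr('title/fill', '#999999') // grey out the task name
  node.setData({ ...node.getData(), disabled: true }) // keep the state queryable
}
```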
### What you expected to happen
There should be a clear visual difference between disabled tasks and normal tasks.
### How to reproduce
See "What happened"
### Anything else
_No response_
### Version
2.0.3
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8181 | https://github.com/apache/dolphinscheduler/pull/8332 | fc9a31f81375fff820a6a718b606b359073cd892 | 42aab0f47ad65909c7ca498d644642e61a377b29 | "2022-01-25T00:18:30Z" | java | "2022-02-10T10:39:04Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/canvas.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="dag-canvas">
<dag-taskbar @on-drag-start="onDragStart" />
<div
class="dag-container"
ref="container"
@dragenter.prevent
@dragover.prevent
@dragleave.prevent
@drop.stop.prevent="onDrop"
>
<div ref="paper" class="paper"></div>
<div ref="minimap" class="minimap"></div>
<div class="scale-slider">
<span class="scale-title">{{$t('dagScale')}}</span>
<el-slider
v-model="scale"
vertical
:max="2"
:min="0.2"
:step="0.2"
:marks="SCALE_MARKS"
@input='scaleChange'
/>
</div>
<context-menu ref="contextMenu" />
</div>
<layout-config-modal ref="layoutModal" @submit="format" />
</div>
</template>
<script>
import _ from 'lodash'
import { Graph, DataUri } from '@antv/x6'
import dagTaskbar from './taskbar.vue'
import contextMenu from './contextMenu.vue'
import layoutConfigModal, { LAYOUT_TYPE, DEFAULT_LAYOUT_CONFIG } from './layoutConfigModal.vue'
import {
NODE,
EDGE,
X6_NODE_NAME,
X6_EDGE_NAME,
NODE_STATUS_MARKUP
} from './x6-helper'
import { DagreLayout, GridLayout } from '@antv/layout'
import { tasksType, tasksState } from '../config'
import { mapActions, mapMutations, mapState } from 'vuex'
import nodeStatus from './nodeStatus'
import x6StyleMixin from './x6-style-mixin'
const SCALE_MARKS = {
0.2: '0.2',
1: '1',
2: '2'
}
export default {
name: 'dag-canvas',
data () {
return {
graph: null,
// Used to calculate the context menu location
originalScrollPosition: {
left: 0,
top: 0
},
editable: true,
dragging: {
// Distance from the mouse to the top-left corner of the dragging element
x: 0,
y: 0,
type: ''
},
// The canvas scale
scale: 1,
SCALE_MARKS
}
},
provide () {
return {
dagCanvas: this
}
},
mixins: [x6StyleMixin],
inject: ['dagChart'],
components: {
dagTaskbar,
contextMenu,
layoutConfigModal
},
computed: {
...mapState('dag', ['tasks'])
},
methods: {
...mapActions('dag', ['genTaskCodeList']),
...mapMutations('dag', ['removeTask']),
/**
* Recalculate the paper width and height
*/
paperResize () {
const w = this.$el.offsetWidth
const h = this.$el.offsetHeight
this.graph.resize(w, h)
},
/**
* Init graph
* This will be called in the dag-chart mounted event
* @param {boolean} uneditable
*/
graphInit (editable) {
const self = this
this.editable = !!editable
const paper = this.$refs.paper
const minimap = this.$refs.minimap
const graph = (this.graph = new Graph({
container: paper,
selecting: {
enabled: true,
multiple: true,
rubberband: true,
rubberEdge: true,
movable: true,
showNodeSelectionBox: false
},
scaling: {
min: 0.2,
max: 2
},
mousewheel: {
enabled: true,
modifiers: ['ctrl', 'meta']
},
scroller: true,
grid: {
size: 10,
visible: true
},
snapline: true,
minimap: {
enabled: true,
container: minimap,
scalable: false,
width: 200,
height: 120
},
interacting: {
edgeLabelMovable: false,
nodeMovable: !!editable,
magnetConnectable: !!editable
},
connecting: {
        // Whether multiple edges can be created between the same start node and end node
allowMulti: false,
// Whether a point is allowed to connect to a blank position on the canvas
allowBlank: false,
// The start node and the end node are the same node
allowLoop: false,
// Whether an edge is allowed to link to another edge
allowEdge: false,
// Whether edges are allowed to link to nodes
allowNode: true,
// Whether to allow edge links to ports
allowPort: false,
// Whether all available ports or nodes are highlighted when you drag the edge
highlight: true,
createEdge () {
return graph.createEdge({ shape: X6_EDGE_NAME })
},
validateConnection (data) {
const { sourceCell, targetCell } = data
if (
sourceCell &&
targetCell &&
sourceCell.isNode() &&
targetCell.isNode()
) {
const edgeData = {
sourceId: Number(sourceCell.id),
targetId: Number(targetCell.id)
}
if (!self.edgeIsValid(edgeData)) {
return false
}
}
return true
}
},
highlighting: {
nodeAvailable: {
name: 'className',
args: {
className: 'available'
}
},
magnetAvailable: {
name: 'className',
args: {
className: 'available'
}
},
magnetAdsorbed: {
name: 'className',
args: {
className: 'adsorbed'
}
}
}
}))
this.registerX6Shape()
this.bindGraphEvent()
this.originalScrollPosition = graph.getScrollbarPosition()
},
/**
* Register custom shapes
*/
registerX6Shape () {
Graph.unregisterNode(X6_NODE_NAME)
Graph.unregisterEdge(X6_EDGE_NAME)
Graph.registerNode(X6_NODE_NAME, { ...NODE })
Graph.registerEdge(X6_EDGE_NAME, { ...EDGE })
},
/**
* Bind grap event
*/
bindGraphEvent () {
this.bindStyleEvent(this.graph)
// update scale bar
this.graph.on('scale', ({ sx }) => {
this.scale = sx
})
// right click
this.graph.on('node:contextmenu', ({ x, y, cell }) => {
const { left, top } = this.graph.getScrollbarPosition()
const o = this.originalScrollPosition
this.$refs.contextMenu.show(x + (o.left - left), y + (o.top - top))
this.$refs.contextMenu.setCurrentTask({
name: cell.data.taskName,
type: cell.data.taskType,
code: Number(cell.id)
})
})
// node double click
this.graph.on('node:dblclick', ({ cell }) => {
this.dagChart.openFormModel(Number(cell.id), cell.data.taskType)
})
// create edge label
this.graph.on('edge:dblclick', ({ cell }) => {
const labelName = this.getEdgeLabelName(cell)
this.dagChart.$refs.edgeEditModel.show({
id: cell.id,
label: labelName
})
})
// Make sure the edge starts with node, not port
this.graph.on('edge:connected', ({ isNew, edge }) => {
if (isNew) {
const sourceNode = edge.getSourceNode()
edge.setSource(sourceNode)
}
})
      // Add a node tool when the mouse enters
this.graph.on('node:mouseenter', ({ e, x, y, node, view }) => {
const nodeName = node.getData().taskName
node.addTools({
name: 'button',
args: {
markup: [
{
tagName: 'text',
textContent: nodeName,
attrs: {
fill: '#868686',
'font-size': 16,
                  'text-anchor': 'middle'
}
}
],
x: 0,
y: 0,
offset: { x: 0, y: -10 }
}
})
})
      // Remove all tools when the mouse leaves
this.graph.on('node:mouseleave', ({ node }) => {
node.removeTools()
})
},
/**
* @param {Edge|string} edge
*/
getEdgeLabelName (edge) {
if (typeof edge === 'string') edge = this.graph.getCellById(edge)
const labels = edge.getLabels()
const labelName = _.get(labels, ['0', 'attrs', 'label', 'text'], '')
return labelName
},
/**
* Set edge label by id
* @param {string} id
* @param {string} label
*/
setEdgeLabel (id, label) {
const edge = this.graph.getCellById(id)
edge.setLabels(label)
},
/**
     * @param {string} text
     * @param {number} n
* Each Chinese character is equal to two chars
*/
truncateText (text, n) {
const exp = /[\u4E00-\u9FA5]/
let res = ''
let len = text.length
let chinese = text.match(new RegExp(exp, 'g'))
if (chinese) {
len += chinese.length
}
if (len > n) {
let i = 0
let acc = 0
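        // Walk the string, counting a CJK character as width 2 and any other
        // character as width 1; stop once the accumulated width exceeds n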
while (true) {
let char = text[i]
if (exp.test(char)) {
acc += 2
} else {
acc++
}
if (acc > n) break
res += char
i++
}
res += '...'
} else {
res = text
}
return res
},
/**
* Set node name by id
* @param {string|number} id
* @param {string} name
*/
setNodeName (id, name) {
id += ''
const node = this.graph.getCellById(id)
if (node) {
const truncation = this.truncateText(name, 18)
node.attr('title/text', truncation)
node.setData({ taskName: name })
}
},
/**
* Convert the graph to JSON
* @return {{cells:Cell[]}}
*/
toJSON () {
return this.graph.toJSON()
},
/**
* Generate graph with JSON
*/
fromJSON (json) {
this.graph.fromJSON(json)
},
/**
* getNodes
* @return {Node[]}
*/
// interface Node {
// id: number;
// position: {x:number;y:number};
// data: {taskType:string;taskName:string;}
// }
getNodes () {
const nodes = this.graph.getNodes()
return nodes.map((node) => {
const position = node.getPosition()
const data = node.getData()
return {
id: Number(node.id),
position: position,
data: data
}
})
},
/**
* getEdges
* @return {Edge[]} Edge is inherited from the Cell
*/
// interface Edge {
// id: string;
// label: string;
// sourceId: number;
// targetId: number;
// }
getEdges () {
const edges = this.graph.getEdges()
return edges.map((edge) => {
const labelData = edge.getLabelAt(0)
return {
id: edge.id,
label: _.get(labelData, ['attrs', 'label', 'text'], ''),
sourceId: Number(edge.getSourceCellId()),
targetId: Number(edge.getTargetCellId())
}
})
},
/**
* downloadPNG
     * @param {string} fileName Default is 'chart'
*/
downloadPNG (fileName = 'chart') {
this.graph.toPNG(
(dataUri) => {
DataUri.downloadDataUri(dataUri, `${fileName}.png`)
},
{
padding: {
top: 50,
right: 50,
bottom: 50,
left: 50
},
backgroundColor: '#f2f3f7'
}
)
},
showLayoutModal () {
const layoutModal = this.$refs.layoutModal
if (layoutModal) {
layoutModal.show()
}
},
/**
* format
     * @desc Auto layout using @antv/layout
*/
format (layoutConfig) {
if (!layoutConfig) {
layoutConfig = DEFAULT_LAYOUT_CONFIG
}
this.graph.cleanSelection()
let layoutFunc = null
if (layoutConfig.type === LAYOUT_TYPE.DAGRE) {
layoutFunc = new DagreLayout({
type: LAYOUT_TYPE.DAGRE,
rankdir: 'LR',
align: 'UL',
// Calculate the node spacing based on the edge label length
ranksepFunc: (d) => {
const edges = this.graph.getOutgoingEdges(d.id)
let max = 0
if (edges && edges.length > 0) {
edges.forEach((edge) => {
const edgeView = this.graph.findViewByCell(edge)
const labelWidth = +edgeView.findAttr(
'width',
_.get(edgeView, ['labelSelectors', '0', 'body'], null)
)
max = Math.max(max, labelWidth)
})
}
return layoutConfig.ranksep + max
},
nodesep: layoutConfig.nodesep,
controlPoints: true
})
} else if (layoutConfig.type === LAYOUT_TYPE.GRID) {
layoutFunc = new GridLayout({
type: LAYOUT_TYPE.GRID,
preventOverlap: true,
preventOverlapPadding: layoutConfig.padding,
sortBy: '_index',
rows: layoutConfig.rows || undefined,
cols: layoutConfig.cols || undefined,
nodeSize: 220
})
}
const json = this.toJSON()
const nodes = json.cells
.filter((cell) => cell.shape === X6_NODE_NAME)
.map((item) => {
return {
...item,
            // sort by code asc
_index: -item.id
}
})
const edges = json.cells.filter((cell) => cell.shape === X6_EDGE_NAME)
const newModel = layoutFunc.layout({
nodes: nodes,
edges: edges
})
this.fromJSON(newModel)
},
/**
* add a node to the graph
* @param {string|number} id
* @param {string} taskType
* @param {{x:number;y:number}} coordinate Default is { x: 100, y: 100 }
*/
addNode (id, taskType, coordinate = { x: 100, y: 100 }) {
id += ''
if (!tasksType[taskType]) {
console.warn(`taskType:${taskType} is invalid!`)
return
}
const node = this.genNodeJSON(id, taskType, '', coordinate)
this.graph.addNode(node)
},
/**
* generate node json
* @param {number|string} id
     * @param {string} taskType
     * @param {string} taskName
     * @param {{x:number;y:number}} coordinate Default is { x: 100, y: 100 }
*/
genNodeJSON (id, taskType, taskName, coordinate = { x: 100, y: 100 }) {
id += ''
const url = require(`../images/task-icos/${taskType.toLocaleLowerCase()}.png`)
const truncation = taskName ? this.truncateText(taskName, 18) : id
return {
id: id,
shape: X6_NODE_NAME,
x: coordinate.x,
y: coordinate.y,
data: {
taskType: taskType,
taskName: taskName
},
attrs: {
image: {
          // Use xlink:href instead of href, otherwise the icon may be lost when calling downloadPNG
'xlink:href': url
},
title: {
text: truncation
}
}
}
},
/**
* generate edge json
* @param {number|string} sourceId
* @param {number|string} targetId
* @param {string} label
*/
genEdgeJSON (sourceId, targetId, label = '') {
sourceId += ''
targetId += ''
return {
shape: X6_EDGE_NAME,
source: {
cell: sourceId
},
target: {
cell: targetId
},
labels: label ? [label] : undefined
}
},
/**
* remove a node
* @param {string|number} id NodeId
*/
removeNode (id) {
id += ''
this.graph.removeNode(id)
this.removeTask(+id)
},
/**
* remove an edge
* @param {string} id EdgeId
*/
removeEdge (id) {
this.graph.removeEdge(id)
},
/**
* remove multiple cells
* @param {Cell[]} cells
*/
removeCells (cells) {
this.graph.removeCells(cells)
cells.forEach((cell) => {
if (cell.isNode()) {
this.removeTask(+cell.id)
}
})
},
/**
* Verify whether edge is valid
     * A CONDITIONS task cannot have more than 2 outgoing edges
*/
edgeIsValid (edge) {
const { sourceId } = edge
const sourceTask = this.tasks.find((task) => task.code === sourceId)
if (sourceTask.taskType === 'CONDITIONS') {
const edges = this.getEdges()
return edges.filter((e) => e.sourceId === sourceTask.code).length <= 2
}
return true
},
/**
* Gets the current selections
* @return {Cell[]}
*/
getSelections () {
return this.graph.getSelectedCells()
},
/**
* Lock scroller
*/
lockScroller () {
this.graph.lockScroller()
},
/**
* Unlock scroller
*/
unlockScroller () {
this.graph.unlockScroller()
},
/**
* set node status icon
* @param {number} code
     * @param {string} state
     * @param {Object} taskInstance
     */
setNodeStatus ({ code, state, taskInstance }) {
code += ''
const stateProps = tasksState[state]
const node = this.graph.getCellById(code)
if (node) {
// Destroy the previous dom
node.removeMarkup()
node.setMarkup(NODE.markup.concat(NODE_STATUS_MARKUP))
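        // The markup now contains the status container again; mount a small
        // Vue component rendering the state icon into that container <div>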
const nodeView = this.graph.findViewByCell(node)
const el = nodeView.find('div')[0]
nodeStatus({
stateProps,
taskInstance
}).$mount(el)
}
},
/**
* Drag && Drop Event
*/
onDragStart (e, taskType) {
if (!this.editable) {
e.preventDefault()
return
}
this.dragging = {
x: e.offsetX,
y: e.offsetY,
type: taskType.name
}
},
onDrop (e) {
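      // Ask the backend for a fresh task code, then add the node at the
      // drop coordinates and open its configuration form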
const { type } = this.dragging
const { x, y } = this.calcGraphCoordinate(e.clientX, e.clientY)
this.genTaskCodeList({
genNum: 1
})
.then((res) => {
const [code] = res
this.addNode(code, type, { x, y })
this.dagChart.openFormModel(code, type)
})
.catch((err) => {
console.error(err)
})
},
calcGraphCoordinate (mClientX, mClientY) {
// Distance from the mouse to the top-left corner of the container;
const { left: cX, top: cY } =
this.$refs.container.getBoundingClientRect()
const mouseX = mClientX - cX
const mouseY = mClientY - cY
// The distance that paper has been scrolled
const { left: sLeft, top: sTop } = this.graph.getScrollbarPosition()
const { left: oLeft, top: oTop } = this.originalScrollPosition
const scrollX = sLeft - oLeft
const scrollY = sTop - oTop
// Distance from the mouse to the top-left corner of the dragging element;
const { x: eX, y: eY } = this.dragging
return {
x: mouseX + scrollX - eX,
y: mouseY + scrollY - eY
}
},
/**
* Get prev nodes by code
* @param {number} code
* node1 -> node2 -> node3
* getPrevNodes(node2.code) => [node1]
*/
getPrevNodes (code) {
const nodes = this.getNodes()
const edges = this.getEdges()
const nodesMap = {}
nodes.forEach((node) => {
nodesMap[node.id] = node
})
return edges
.filter((edge) => edge.targetId === code)
.map((edge) => nodesMap[edge.sourceId])
},
/**
* set prev nodes
* @param {number} code
* @param {number[]} preNodeCodes
     * @param {boolean} override If set to true, setPreNodes will delete all edges that end at the node and rebuild them
*/
setPreNodes (code, preNodeCodes, override) {
const edges = this.getEdges()
const currPreCodes = []
edges.forEach((edge) => {
if (edge.targetId === code) {
if (override) {
this.removeEdge(edge.id)
} else {
currPreCodes.push(edge.sourceId)
}
}
})
preNodeCodes.forEach((preCode) => {
if (currPreCodes.includes(preCode) || preCode === code) return
const edge = this.genEdgeJSON(preCode, code)
this.graph.addEdge(edge)
})
},
/**
* Get post nodes by code
* @param {number} code
* node1 -> node2 -> node3
* getPostNodes(node2.code) => [node3]
*/
getPostNodes (code) {
const nodes = this.getNodes()
const edges = this.getEdges()
const nodesMap = {}
nodes.forEach((node) => {
nodesMap[node.id] = node
})
return edges
.filter((edge) => edge.sourceId === code)
.map((edge) => nodesMap[edge.targetId])
},
/**
* set post nodes
* @param {number} code
* @param {number[]} postNodeCodes
     * @param {boolean} override If set to true, setPostNodes will delete all edges that start from the node and rebuild them
*/
setPostNodes (code, postNodeCodes, override) {
const edges = this.getEdges()
const currPostCodes = []
edges.forEach((edge) => {
if (edge.sourceId === code) {
if (override) {
this.removeEdge(edge.id)
} else {
currPostCodes.push(edge.targetId)
}
}
})
postNodeCodes.forEach((postCode) => {
if (currPostCodes.includes(postCode) || postCode === code) return
const edge = this.genEdgeJSON(code, postCode)
this.graph.addEdge(edge)
})
},
/**
* Navigate to cell
* @param {string} taskName
*/
navigateTo (taskName) {
const nodes = this.getNodes()
nodes.forEach((node) => {
if (node.data.taskName === taskName) {
const id = node.id
const cell = this.graph.getCellById(id)
this.graph.scrollToCell(cell, { animation: { duration: 600 } })
this.graph.cleanSelection()
this.graph.select(cell)
}
})
},
/**
* Canvas scale
*/
scaleChange (val) {
this.graph.zoomTo(val)
}
}
}
</script>
<style lang="scss" scoped>
@import "./canvas";
</style>
<style lang="scss">
@import "./x6-style";
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,181 | [Bug] [UI] There is no obvious difference between a disabled task and a normal task, and it is not easy to identify | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
There is no obvious difference between a disabled task and a normal task, and it is not easy to identify.
Whether disabled tasks can be distinguished by adding special identifiers.
### What you expected to happen
There can be a clear difference .
### How to reproduce
See "What happened"
### Anything else
_No response_
### Version
2.0.3
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8181 | https://github.com/apache/dolphinscheduler/pull/8332 | fc9a31f81375fff820a6a718b606b359073cd892 | 42aab0f47ad65909c7ca498d644642e61a377b29 | "2022-01-25T00:18:30Z" | java | "2022-02-10T10:39:04Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div :class="['dag-chart', fullScreen ? 'full-screen' : '']">
<dag-toolbar />
<dag-canvas ref="canvas" />
<el-drawer
:visible.sync="taskDrawer"
size=""
:with-header="false"
:wrapperClosable="false"
class="task-drawer"
>
<!-- fix the bug that Element-ui(2.13.2) auto focus on the first input -->
<div style="width: 0px; height: 0px; overflow: hidden">
<el-input type="text" />
</div>
<m-form-model
v-if="taskDrawer"
:nodeData="nodeData"
:project-code="projectCode"
@seeHistory="seeHistory"
@addTaskInfo="addTaskInfo"
@close="closeTaskDrawer"
@onSubProcess="toSubProcess"
:type="type"
></m-form-model>
</el-drawer>
<el-dialog
:title="$t('Set the DAG diagram name')"
:visible.sync="saveDialog"
width="auto"
>
<m-udp ref="mUdp" @onUdp="onSave" @close="cancelSave"></m-udp>
</el-dialog>
<el-dialog
:title="$t('Please set the parameters before starting')"
:visible.sync="startDialog"
width="auto"
>
<m-start
:startData="{ code: definitionCode, name: name }"
:startNodeList="startTaskName"
:sourceType="'contextmenu'"
@onUpdateStart="onUpdateStart"
@closeStart="closeStart"
></m-start>
</el-dialog>
<edge-edit-model ref="edgeEditModel" />
<el-drawer :visible.sync="versionDrawer" size="" :with-header="false">
<!-- fix the bug that Element-ui(2.13.2) auto focus on the first input -->
<div style="width: 0px; height: 0px; overflow: hidden">
<el-input type="text" />
</div>
<m-versions
:versionData="versionData"
:isInstance="type === 'instance'"
@mVersionSwitchProcessDefinitionVersion="switchProcessVersion"
@mVersionGetProcessDefinitionVersionsPage="getProcessVersions"
@mVersionDeleteProcessDefinitionVersion="deleteProcessVersion"
@closeVersion="closeVersion"
></m-versions>
</el-drawer>
<m-log
v-if="type === 'instance' && logDialog"
:item="logTaskInstance"
source='dag'
:task-instance-id="logTaskInstance.id"
@close="closeLogDialog"
></m-log>
</div>
</template>
<script>
import { debounce } from 'lodash'
import dagToolbar from './canvas/toolbar.vue'
import dagCanvas from './canvas/canvas.vue'
import mFormModel from '../_source/formModel/formModel.vue'
import { mapActions, mapState, mapMutations } from 'vuex'
import mUdp from '../_source/udp/udp.vue'
import mStart from '../../projects/pages/definition/pages/list/_source/start.vue'
import edgeEditModel from './canvas/edgeEditModel.vue'
import mVersions from '../../projects/pages/definition/pages/list/_source/versions.vue'
import mLog from './formModel/log.vue'
const DEFAULT_NODE_DATA = {
id: null,
taskType: '',
self: {},
instanceId: null
}
export default {
name: 'dag-chart',
components: {
dagCanvas,
dagToolbar,
mFormModel,
mUdp,
mStart,
edgeEditModel,
mVersions,
mLog
},
provide () {
return {
dagChart: this
}
},
inject: ['definitionDetails'],
props: {
type: String,
releaseState: String
},
data () {
return {
definitionCode: 0,
// full screen mode
fullScreen: false,
// whether the task config drawer is visible
taskDrawer: false,
nodeData: { ...DEFAULT_NODE_DATA },
// whether the save dialog is visible
saveDialog: false,
// whether the start dialog is visible
startDialog: false,
startTaskName: '',
// whether the version drawer is visible
versionDrawer: false,
versionData: {
processDefinition: {
id: null,
version: '',
releaseState: ''
},
processDefinitionVersions: [],
total: null,
pageNo: null,
pageSize: null
},
// the task status refresh timer
statusTimer: null,
// the process instance id
instanceId: -1,
// log dialog
logDialog: false,
logTaskInstance: null,
taskInstances: []
}
},
mounted () {
this.setIsEditDag(false)
if (this.type === 'instance') {
this.instanceId = this.$route.params.id
this.definitionCode = this.$route.query.code || this.code
} else if (this.type === 'definition') {
this.definitionCode = this.$route.params.code
}
// auto resize canvas
this.resizeDebounceFunc = debounce(this.canvasResize, 200)
window.addEventListener('resize', this.resizeDebounceFunc)
// init graph
this.$refs.canvas.graphInit(!this.isDetails)
// backfill graph with tasks, locations and connects
this.backfill()
// refresh task status
if (this.type === 'instance') {
this.refreshTaskStatus()
// status polling
this.statusTimer = setInterval(() => {
this.refreshTaskStatus()
}, 90000)
}
},
beforeDestroy () {
this.resetParams()
clearInterval(this.statusTimer)
window.removeEventListener('resize', this.resizeDebounceFunc)
},
computed: {
...mapState('dag', [
'tasks',
'locations',
'connects',
'name',
'isDetails',
'projectCode',
'version',
'code'
])
},
methods: {
...mapActions('dag', [
'saveDAGchart',
'updateInstance',
'updateDefinition',
'getTaskState',
'getStartCheck',
'genTaskCodeList',
'switchProcessDefinitionVersion',
'getProcessDefinitionVersionsPage',
'deleteProcessDefinitionVersion'
]),
...mapMutations('dag', [
'addTask',
'setConnects',
'resetParams',
'setIsEditDag',
'setName',
'setLocations',
'resetLocalParam',
'setDependResult'
]),
/**
* Resize the canvas when the container size changes
*/
canvasResize () {
const canvas = this.$refs.canvas
canvas && canvas.paperResize()
},
toggleFullScreen () {
this.fullScreen = !this.fullScreen
this.$nextTick(this.canvasResize)
},
/**
* Task Drawer
* @param {boolean} visible
*/
toggleTaskDrawer (visible) {
this.taskDrawer = visible
},
/**
* Set the current node data
*/
setNodeData (nodeData) {
// merge into a fresh object so the shared DEFAULT_NODE_DATA is not mutated
this.nodeData = Object.assign({}, DEFAULT_NODE_DATA, nodeData)
},
/**
* open form model
* @desc Edit task config
* @param {number} taskCode
* @param {string} taskType
*/
openFormModel (taskCode, taskType) {
this.setNodeData({
id: taskCode,
taskType: taskType
})
this.toggleTaskDrawer(true)
},
addTaskInfo ({ item }) {
this.addTask(item)
this.$refs.canvas.setNodeName(item.code, item.name)
this.taskDrawer = false
},
closeTaskDrawer ({ flag }) {
if (flag) {
const canvas = this.$refs.canvas
canvas.removeNode(this.nodeData.id)
}
this.taskDrawer = false
},
/**
* Save dialog
*/
toggleSaveDialog (value) {
this.saveDialog = value
if (value) {
this.$nextTick(() => {
this.$refs.mUdp.reloadParam()
})
}
},
onSave (sourceType) {
this.toggleSaveDialog(false)
return new Promise((resolve, reject) => {
let tasks = this.tasks || []
const edges = this.$refs.canvas.getEdges()
const nodes = this.$refs.canvas.getNodes()
if (!nodes.length) {
reject(this.$t('Failed to create node to save'))
// bail out; without this return, resolve would still run below
return
}
const connects = this.buildConnects(edges, tasks)
this.setConnects(connects)
const locations = nodes.map((node) => {
return {
taskCode: node.id,
x: node.position.x,
y: node.position.y
}
})
this.setLocations(locations)
resolve({
connects: connects,
tasks: tasks,
locations: locations
})
})
.then((res) => {
if (this.verifyConditions(res.tasks)) {
this.loading(true)
const isEdit = !!this.definitionCode
if (isEdit) {
const methodName = this.type === 'instance' ? 'updateInstance' : 'updateDefinition'
const methodParam = this.type === 'instance' ? this.instanceId : this.definitionCode
// Edit
return this[methodName](methodParam)
.then((res) => {
this.$message({
message: res.msg,
type: 'success',
offset: 80
})
if (this.type === 'instance') {
this.$router.push({
path: `/projects/${this.projectCode}/instance/list/${methodParam}`
})
} else {
this.$router.push({
path: `/projects/${this.projectCode}/definition/list/${methodParam}`
})
}
})
.catch((e) => {
this.$message.error(e.msg || '')
})
.finally((e) => {
this.loading(false)
})
} else {
// Create
return this.saveDAGchart()
.then((res) => {
this.$message.success(res.msg)
// source @/conf/home/pages/dag/_source/editAffirmModel/index.js
if (sourceType !== 'affirm') {
// Jump process definition
this.$router.push({ name: 'projects-definition-list' })
}
})
.catch((e) => {
this.setName('')
this.$message.error(e.msg || '')
})
.finally((e) => {
this.loading(false)
})
}
}
})
.catch((err) => {
let msg = typeof err === 'string' ? err : err.msg || ''
this.$message.error(msg)
})
},
verifyConditions (value) {
let tasks = value
let bool = true
tasks.forEach((v) => {
if (
v.taskType === 'CONDITIONS' &&
(v.taskParams.conditionResult.successNode[0] === '' ||
v.taskParams.conditionResult.successNode[0] === null ||
v.taskParams.conditionResult.failedNode[0] === '' ||
v.taskParams.conditionResult.failedNode[0] === null)
) {
bool = false
}
})
if (!bool) {
this.$message.warning(
`${this.$t(
'Successful branch flow and failed branch flow are required'
)}`
)
return false
}
return true
},
cancelSave () {
this.toggleSaveDialog(false)
},
/**
* build graph json
*/
buildGraphJSON (tasks, locations, connects) {
const nodes = []
const edges = []
if (!locations) { locations = [] }
tasks.forEach((task) => {
const location = locations.find((l) => l.taskCode === task.code) || {}
const node = this.$refs.canvas.genNodeJSON(
task.code,
task.taskType,
task.name,
{
x: location.x,
y: location.y
}
)
nodes.push(node)
})
connects
.filter((r) => !!r.preTaskCode)
.forEach((c) => {
const edge = this.$refs.canvas.genEdgeJSON(
c.preTaskCode,
c.postTaskCode,
c.name
)
edges.push(edge)
})
return {
nodes,
edges
}
},
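/*
* Illustrative sketch of buildGraphJSON (hypothetical task code 1001, not
* taken from a real workflow):
*   tasks:     [{ code: 1001, taskType: 'SHELL', name: 'demo' }]
*   locations: [{ taskCode: 1001, x: 120, y: 80 }]
*   connects:  [{ preTaskCode: 0, postTaskCode: 1001, name: '' }]
* => { nodes: [<node JSON for 1001 at (120, 80)>], edges: [] }
* The head connect (preTaskCode 0) is filtered out, so no edge is generated.
*/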
/**
* Build connects by edges and tasks
* @param {Edge[]} edges
* @param {Task[]} tasks
* @returns
*/
buildConnects (edges, tasks) {
const preTaskMap = {}
const tasksMap = {}
edges.forEach((edge) => {
preTaskMap[edge.targetId] = {
sourceId: edge.sourceId,
edgeLabel: edge.label || ''
}
})
tasks.forEach((task) => {
tasksMap[task.code] = task
})
const headEdges = tasks
.filter((task) => !preTaskMap[task.code])
.map((task) => {
return {
name: '',
preTaskCode: 0,
preTaskVersion: 0,
postTaskCode: task.code,
postTaskVersion: task.version || 0,
// conditionType and conditionParams are reserved
conditionType: 0,
conditionParams: {}
}
})
return edges
.map((edge) => {
return {
name: edge.label,
preTaskCode: edge.sourceId,
preTaskVersion: tasksMap[edge.sourceId].version || 0,
postTaskCode: edge.targetId,
postTaskVersion: tasksMap[edge.targetId].version || 0,
// conditionType and conditionParams are reserved
conditionType: 0,
conditionParams: {}
}
})
.concat(headEdges)
},
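/*
* Illustrative sketch of buildConnects (hypothetical task codes):
*   edges: [{ sourceId: 1001, targetId: 1002, label: '' }]
*   tasks: [{ code: 1001, version: 1 }, { code: 1002, version: 1 }]
* =>
*   [{ name: '', preTaskCode: 1001, preTaskVersion: 1, postTaskCode: 1002,
*      postTaskVersion: 1, conditionType: 0, conditionParams: {} },
*    { name: '', preTaskCode: 0, preTaskVersion: 0, postTaskCode: 1001,
*      postTaskVersion: 1, conditionType: 0, conditionParams: {} }]
* The second entry is the generated "head edge" for task 1001, which has no
* predecessor.
*/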
backfill () {
const tasks = this.tasks
const locations = this.locations
const connects = this.connects
const json = this.buildGraphJSON(tasks, locations, connects)
this.$refs.canvas.fromJSON(json)
// Auto format
if (!locations) {
this.$refs.canvas.format()
}
},
/**
* Return to the previous process
*/
returnToPrevProcess () {
let $name = this.$route.name.split('-')
let subs = this.$route.query.subs
let ids = subs.split(',')
const last = ids.pop()
this.$router.push({
path: `/${$name[0]}/${this.projectCode}/${$name[1]}/list/${last}`,
query: ids.length > 0 ? { subs: ids.join(',') } : null
})
},
toSubProcess ({ subProcessCode, subInstanceId }) {
const tarIdentifier =
this.type === 'instance' ? subInstanceId : subProcessCode
const curIdentifier =
this.type === 'instance' ? this.instanceId : this.definitionCode
let subs = []
let olds = this.$route.query.subs
if (olds) {
subs = olds.split(',')
subs.push(curIdentifier)
} else {
subs.push(curIdentifier)
}
let $name = this.$route.name.split('-')
this.$router.push({
path: `/${$name[0]}/${this.projectCode}/${$name[1]}/list/${tarIdentifier}`,
query: { subs: subs.join(',') }
})
},
seeHistory (taskName) {
this.$router.push({
name: 'task-instance',
query: {
processInstanceId: this.instanceId,
taskName: taskName
}
})
},
/**
* Start dialog
*/
startRunning (taskName) {
this.startTaskName = taskName
this.getStartCheck({ processDefinitionCode: this.definitionCode }).then(
(res) => {
this.startDialog = true
}
)
},
onUpdateStart () {
this.startDialog = false
},
closeStart () {
this.startDialog = false
},
/**
* Task status
*/
refreshTaskStatus () {
const instanceId = this.$route.params.id
this.loading(true)
this.getTaskState(instanceId)
.then((res) => {
this.$message(this.$t('Refresh status succeeded'))
const { taskList } = res.data
const list = res.list
if (taskList) {
this.taskInstances = taskList
taskList.forEach((taskInstance) => {
this.$refs.canvas.setNodeStatus({
code: taskInstance.taskCode,
state: taskInstance.state,
taskInstance
})
})
}
if (list) {
list.forEach((dependent) => {
if (dependent.dependentResult) {
this.setDependResult(JSON.parse(dependent.dependentResult))
}
})
}
})
.finally(() => {
this.loading(false)
})
},
/**
* Loading
* @param {boolean} visible
*/
loading (visible) {
if (visible) {
this.spinner = this.$loading({
lock: true,
text: this.$t('Loading...'),
spinner: 'el-icon-loading',
background: 'rgba(0, 0, 0, 0.4)',
customClass: 'dag-fullscreen-loading'
})
} else {
this.spinner && this.spinner.close()
}
},
/**
* change process definition version
*/
showVersions () {
this.getProcessDefinitionVersionsPage({
pageNo: 1,
pageSize: 10,
code: this.definitionCode
})
.then((res) => {
let processDefinitionVersions = res.data.totalList
let total = res.data.total
let pageSize = res.data.pageSize
let pageNo = res.data.currentPage
// this.versionData.processDefinition.id = this.urlParam.id
this.versionData.processDefinition.code = this.definitionCode
this.versionData.processDefinition.version = this.version
this.versionData.processDefinition.releaseState = this.releaseState
this.versionData.processDefinitionVersions =
processDefinitionVersions
this.versionData.total = total
this.versionData.pageNo = pageNo
this.versionData.pageSize = pageSize
this.versionDrawer = true
})
.catch((e) => {
this.$message.error(e.msg || '')
})
},
closeVersion () {
this.versionDrawer = false
},
switchProcessVersion ({ version, processDefinitionCode }) {
this.switchProcessDefinitionVersion({
version: version,
code: processDefinitionCode
})
.then((res) => {
this.$message.success(this.$t('Switch Version Successfully'))
this.closeVersion()
this.definitionDetails.init()
})
.catch((e) => {
this.$message.error(e.msg || '')
})
},
getProcessVersions ({ pageNo, pageSize, processDefinitionCode }) {
this.getProcessDefinitionVersionsPage({
pageNo: pageNo,
pageSize: pageSize,
code: processDefinitionCode
})
.then((res) => {
this.versionData.processDefinitionVersions = res.data.totalList
this.versionData.total = res.data.total
this.versionData.pageSize = res.data.pageSize
this.versionData.pageNo = res.data.currentPage
})
.catch((e) => {
this.$message.error(e.msg || '')
})
},
deleteProcessVersion ({ version, processDefinitionCode }) {
this.deleteProcessDefinitionVersion({
version: version,
code: processDefinitionCode
})
.then((res) => {
this.$message.success(res.msg || '')
this.getProcessVersions({
pageNo: 1,
pageSize: 10,
processDefinitionCode: processDefinitionCode
})
})
.catch((e) => {
this.$message.error(e.msg || '')
})
},
/**
* Log dialog
*/
closeLogDialog () {
this.logDialog = false
this.logTaskInstance = null
},
showLogDialog (taskDefinitionCode) {
const taskInstance = this.taskInstances.find(taskInstance => {
return taskInstance.taskCode === taskDefinitionCode
})
if (taskInstance) {
this.logTaskInstance = {
id: taskInstance.id,
type: taskInstance.taskType
}
this.logDialog = true
}
}
}
}
</script>
<style lang="scss" scoped>
@import "./dag";
</style>
<style lang="scss">
@import "./loading";
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,050 | [Bug] [UI] Forbidden tasks need to be represented by a forbidden icon | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![image](https://user-images.githubusercontent.com/95271106/149500761-899fb44d-430c-402e-8f09-83fdb90290a0.png)
### What you expected to happen
Forbidden (disabled) tasks should be displayed with a distinct forbidden icon in the DAG.
### How to reproduce
Mark a task as forbidden in a workflow definition and view the DAG; the forbidden node looks the same as a normal one.
### Anything else
_No response_
### Version
2.0.2
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8050 | https://github.com/apache/dolphinscheduler/pull/8332 | fc9a31f81375fff820a6a718b606b359073cd892 | 42aab0f47ad65909c7ca498d644642e61a377b29 | "2022-01-14T10:29:19Z" | java | "2022-02-10T10:39:04Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/canvas.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="dag-canvas">
<dag-taskbar @on-drag-start="onDragStart" />
<div
class="dag-container"
ref="container"
@dragenter.prevent
@dragover.prevent
@dragleave.prevent
@drop.stop.prevent="onDrop"
>
<div ref="paper" class="paper"></div>
<div ref="minimap" class="minimap"></div>
<div class="scale-slider">
<span class="scale-title">{{$t('dagScale')}}</span>
<el-slider
v-model="scale"
vertical
:max="2"
:min="0.2"
:step="0.2"
:marks="SCALE_MARKS"
@input='scaleChange'
/>
</div>
<context-menu ref="contextMenu" />
</div>
<layout-config-modal ref="layoutModal" @submit="format" />
</div>
</template>
<script>
import _ from 'lodash'
import { Graph, DataUri } from '@antv/x6'
import dagTaskbar from './taskbar.vue'
import contextMenu from './contextMenu.vue'
import layoutConfigModal, { LAYOUT_TYPE, DEFAULT_LAYOUT_CONFIG } from './layoutConfigModal.vue'
import {
NODE,
EDGE,
X6_NODE_NAME,
X6_EDGE_NAME,
NODE_STATUS_MARKUP
} from './x6-helper'
import { DagreLayout, GridLayout } from '@antv/layout'
import { tasksType, tasksState } from '../config'
import { mapActions, mapMutations, mapState } from 'vuex'
import nodeStatus from './nodeStatus'
import x6StyleMixin from './x6-style-mixin'
const SCALE_MARKS = {
0.2: '0.2',
1: '1',
2: '2'
}
export default {
name: 'dag-canvas',
data () {
return {
graph: null,
// Used to calculate the context menu location
originalScrollPosition: {
left: 0,
top: 0
},
editable: true,
dragging: {
// Distance from the mouse to the top-left corner of the dragging element
x: 0,
y: 0,
type: ''
},
// The canvas scale
scale: 1,
SCALE_MARKS
}
},
provide () {
return {
dagCanvas: this
}
},
mixins: [x6StyleMixin],
inject: ['dagChart'],
components: {
dagTaskbar,
contextMenu,
layoutConfigModal
},
computed: {
...mapState('dag', ['tasks'])
},
methods: {
...mapActions('dag', ['genTaskCodeList']),
...mapMutations('dag', ['removeTask']),
/**
* Recalculate the paper width and height
*/
paperResize () {
const w = this.$el.offsetWidth
const h = this.$el.offsetHeight
this.graph.resize(w, h)
},
/**
* Init graph
* This will be called in the dag-chart mounted event
* @param {boolean} editable
*/
graphInit (editable) {
const self = this
this.editable = !!editable
const paper = this.$refs.paper
const minimap = this.$refs.minimap
const graph = (this.graph = new Graph({
container: paper,
selecting: {
enabled: true,
multiple: true,
rubberband: true,
rubberEdge: true,
movable: true,
showNodeSelectionBox: false
},
scaling: {
min: 0.2,
max: 2
},
mousewheel: {
enabled: true,
modifiers: ['ctrl', 'meta']
},
scroller: true,
grid: {
size: 10,
visible: true
},
snapline: true,
minimap: {
enabled: true,
container: minimap,
scalable: false,
width: 200,
height: 120
},
interacting: {
edgeLabelMovable: false,
nodeMovable: !!editable,
magnetConnectable: !!editable
},
connecting: {
// Whether multiple edges can be created between the same start node and end node
allowMulti: false,
// Whether a point is allowed to connect to a blank position on the canvas
allowBlank: false,
// The start node and the end node are the same node
allowLoop: false,
// Whether an edge is allowed to link to another edge
allowEdge: false,
// Whether edges are allowed to link to nodes
allowNode: true,
// Whether to allow edge links to ports
allowPort: false,
// Whether all available ports or nodes are highlighted when you drag the edge
highlight: true,
createEdge () {
return graph.createEdge({ shape: X6_EDGE_NAME })
},
validateConnection (data) {
const { sourceCell, targetCell } = data
if (
sourceCell &&
targetCell &&
sourceCell.isNode() &&
targetCell.isNode()
) {
const edgeData = {
sourceId: Number(sourceCell.id),
targetId: Number(targetCell.id)
}
if (!self.edgeIsValid(edgeData)) {
return false
}
}
return true
}
},
highlighting: {
nodeAvailable: {
name: 'className',
args: {
className: 'available'
}
},
magnetAvailable: {
name: 'className',
args: {
className: 'available'
}
},
magnetAdsorbed: {
name: 'className',
args: {
className: 'adsorbed'
}
}
}
}))
this.registerX6Shape()
this.bindGraphEvent()
this.originalScrollPosition = graph.getScrollbarPosition()
},
/**
* Register custom shapes
*/
registerX6Shape () {
Graph.unregisterNode(X6_NODE_NAME)
Graph.unregisterEdge(X6_EDGE_NAME)
Graph.registerNode(X6_NODE_NAME, { ...NODE })
Graph.registerEdge(X6_EDGE_NAME, { ...EDGE })
},
/**
* Bind graph events
*/
bindGraphEvent () {
this.bindStyleEvent(this.graph)
// update scale bar
this.graph.on('scale', ({ sx }) => {
this.scale = sx
})
// right click
this.graph.on('node:contextmenu', ({ x, y, cell }) => {
const { left, top } = this.graph.getScrollbarPosition()
const o = this.originalScrollPosition
this.$refs.contextMenu.show(x + (o.left - left), y + (o.top - top))
this.$refs.contextMenu.setCurrentTask({
name: cell.data.taskName,
type: cell.data.taskType,
code: Number(cell.id)
})
})
// node double click
this.graph.on('node:dblclick', ({ cell }) => {
this.dagChart.openFormModel(Number(cell.id), cell.data.taskType)
})
// create edge label
this.graph.on('edge:dblclick', ({ cell }) => {
const labelName = this.getEdgeLabelName(cell)
this.dagChart.$refs.edgeEditModel.show({
id: cell.id,
label: labelName
})
})
// Make sure the edge starts with node, not port
this.graph.on('edge:connected', ({ isNew, edge }) => {
if (isNew) {
const sourceNode = edge.getSourceNode()
edge.setSource(sourceNode)
}
})
// Add a node tool when the mouse entering
this.graph.on('node:mouseenter', ({ e, x, y, node, view }) => {
const nodeName = node.getData().taskName
node.addTools({
name: 'button',
args: {
markup: [
{
tagName: 'text',
textContent: nodeName,
attrs: {
fill: '#868686',
'font-size': 16,
'text-anchor': 'center'
}
}
],
x: 0,
y: 0,
offset: { x: 0, y: -10 }
}
})
})
// Remove all tools when the mouse leaving
this.graph.on('node:mouseleave', ({ node }) => {
node.removeTools()
})
},
/**
* @param {Edge|string} edge
*/
getEdgeLabelName (edge) {
if (typeof edge === 'string') edge = this.graph.getCellById(edge)
const labels = edge.getLabels()
const labelName = _.get(labels, ['0', 'attrs', 'label', 'text'], '')
return labelName
},
/**
* Set edge label by id
* @param {string} id
* @param {string} label
*/
setEdgeLabel (id, label) {
const edge = this.graph.getCellById(id)
edge.setLabels(label)
},
/**
* @param {string} text
* @param {number} n The maximum display length
* Each Chinese character counts as two chars
*/
truncateText (text, n) {
const exp = /[\u4E00-\u9FA5]/
let res = ''
let len = text.length
let chinese = text.match(new RegExp(exp, 'g'))
if (chinese) {
len += chinese.length
}
if (len > n) {
let i = 0
let acc = 0
while (true) {
let char = text[i]
if (exp.test(char)) {
acc += 2
} else {
acc++
}
if (acc > n) break
res += char
i++
}
res += '...'
} else {
res = text
}
return res
},
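/*
* Usage sketch (illustrative strings):
*   truncateText('abcdefgh', 5) => 'abcde...'
*   truncateText('数据同步', 5)  => '数据...' (each Chinese char counts as 2)
*   truncateText('abc', 5)      => 'abc' (short enough, returned unchanged)
*/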
/**
* Set node name by id
* @param {string|number} id
* @param {string} name
*/
setNodeName (id, name) {
id += ''
const node = this.graph.getCellById(id)
if (node) {
const truncation = this.truncateText(name, 18)
node.attr('title/text', truncation)
node.setData({ taskName: name })
}
},
/**
* Convert the graph to JSON
* @return {{cells:Cell[]}}
*/
toJSON () {
return this.graph.toJSON()
},
/**
* Generate graph with JSON
*/
fromJSON (json) {
this.graph.fromJSON(json)
},
/**
* getNodes
* @return {Node[]}
*/
// interface Node {
// id: number;
// position: {x:number;y:number};
// data: {taskType:string;taskName:string;}
// }
getNodes () {
const nodes = this.graph.getNodes()
return nodes.map((node) => {
const position = node.getPosition()
const data = node.getData()
return {
id: Number(node.id),
position: position,
data: data
}
})
},
/**
* getEdges
* @return {Edge[]} Edge is inherited from the Cell
*/
// interface Edge {
// id: string;
// label: string;
// sourceId: number;
// targetId: number;
// }
getEdges () {
const edges = this.graph.getEdges()
return edges.map((edge) => {
const labelData = edge.getLabelAt(0)
return {
id: edge.id,
label: _.get(labelData, ['attrs', 'label', 'text'], ''),
sourceId: Number(edge.getSourceCellId()),
targetId: Number(edge.getTargetCellId())
}
})
},
/**
* downloadPNG
* @param {string} fileName Defaults to 'chart'
*/
downloadPNG (fileName = 'chart') {
this.graph.toPNG(
(dataUri) => {
DataUri.downloadDataUri(dataUri, `${fileName}.png`)
},
{
padding: {
top: 50,
right: 50,
bottom: 50,
left: 50
},
backgroundColor: '#f2f3f7'
}
)
},
showLayoutModal () {
const layoutModal = this.$refs.layoutModal
if (layoutModal) {
layoutModal.show()
}
},
/**
* format
* @desc Auto layout use @antv/layout
*/
format (layoutConfig) {
if (!layoutConfig) {
layoutConfig = DEFAULT_LAYOUT_CONFIG
}
this.graph.cleanSelection()
let layoutFunc = null
if (layoutConfig.type === LAYOUT_TYPE.DAGRE) {
layoutFunc = new DagreLayout({
type: LAYOUT_TYPE.DAGRE,
rankdir: 'LR',
align: 'UL',
// Calculate the node spacing based on the edge label length
ranksepFunc: (d) => {
const edges = this.graph.getOutgoingEdges(d.id)
let max = 0
if (edges && edges.length > 0) {
edges.forEach((edge) => {
const edgeView = this.graph.findViewByCell(edge)
const labelWidth = +edgeView.findAttr(
'width',
_.get(edgeView, ['labelSelectors', '0', 'body'], null)
)
max = Math.max(max, labelWidth)
})
}
return layoutConfig.ranksep + max
},
nodesep: layoutConfig.nodesep,
controlPoints: true
})
} else if (layoutConfig.type === LAYOUT_TYPE.GRID) {
layoutFunc = new GridLayout({
type: LAYOUT_TYPE.GRID,
preventOverlap: true,
preventOverlapPadding: layoutConfig.padding,
sortBy: '_index',
rows: layoutConfig.rows || undefined,
cols: layoutConfig.cols || undefined,
nodeSize: 220
})
}
const json = this.toJSON()
const nodes = json.cells
.filter((cell) => cell.shape === X6_NODE_NAME)
.map((item) => {
return {
...item,
// sort by code asc
_index: -item.id
}
})
const edges = json.cells.filter((cell) => cell.shape === X6_EDGE_NAME)
const newModel = layoutFunc.layout({
nodes: nodes,
edges: edges
})
this.fromJSON(newModel)
},
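/*
* Usage sketch (hypothetical config values; the real defaults live in
* DEFAULT_LAYOUT_CONFIG from layoutConfigModal.vue):
*   this.format({ type: LAYOUT_TYPE.DAGRE, ranksep: 50, nodesep: 50 })
*   this.format({ type: LAYOUT_TYPE.GRID, padding: 50, rows: 0, cols: 0 })
*/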
/**
* add a node to the graph
* @param {string|number} id
* @param {string} taskType
* @param {{x:number;y:number}} coordinate Default is { x: 100, y: 100 }
*/
addNode (id, taskType, coordinate = { x: 100, y: 100 }) {
id += ''
if (!tasksType[taskType]) {
console.warn(`taskType:${taskType} is invalid!`)
return
}
const node = this.genNodeJSON(id, taskType, '', coordinate)
this.graph.addNode(node)
},
/**
* generate node json
* @param {number|string} id
* @param {string} taskType
* @param {string} taskName
* @param {{x:number;y:number}} coordinate Default is { x: 100, y: 100 }
*/
genNodeJSON (id, taskType, taskName, coordinate = { x: 100, y: 100 }) {
id += ''
const url = require(`../images/task-icos/${taskType.toLocaleLowerCase()}.png`)
const truncation = taskName ? this.truncateText(taskName, 18) : id
return {
id: id,
shape: X6_NODE_NAME,
x: coordinate.x,
y: coordinate.y,
data: {
taskType: taskType,
taskName: taskName
},
attrs: {
image: {
// Use xlink:href instead of href, otherwise the icon may be lost when calling downloadPNG
'xlink:href': url
},
title: {
text: truncation
}
}
}
},
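/*
* Illustrative result (hypothetical task code 1001):
*   genNodeJSON(1001, 'SHELL', 'demo', { x: 120, y: 80 }) =>
*   { id: '1001', shape: X6_NODE_NAME, x: 120, y: 80,
*     data: { taskType: 'SHELL', taskName: 'demo' },
*     attrs: { image: { 'xlink:href': <shell icon url> },
*              title: { text: 'demo' } } }
*/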
/**
* generate edge json
* @param {number|string} sourceId
* @param {number|string} targetId
* @param {string} label
*/
genEdgeJSON (sourceId, targetId, label = '') {
sourceId += ''
targetId += ''
return {
shape: X6_EDGE_NAME,
source: {
cell: sourceId
},
target: {
cell: targetId
},
labels: label ? [label] : undefined
}
},
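/*
* Illustrative result (hypothetical task codes):
*   genEdgeJSON(1001, 1002, 'success') =>
*   { shape: X6_EDGE_NAME, source: { cell: '1001' },
*     target: { cell: '1002' }, labels: ['success'] }
* An empty label yields labels: undefined.
*/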
/**
* remove a node
* @param {string|number} id NodeId
*/
removeNode (id) {
id += ''
this.graph.removeNode(id)
this.removeTask(+id)
},
/**
* remove an edge
* @param {string} id EdgeId
*/
removeEdge (id) {
this.graph.removeEdge(id)
},
/**
* remove multiple cells
* @param {Cell[]} cells
*/
removeCells (cells) {
this.graph.removeCells(cells)
cells.forEach((cell) => {
if (cell.isNode()) {
this.removeTask(+cell.id)
}
})
},
/**
* Verify whether edge is valid
* A CONDITIONS task can have at most two outgoing edges
*/
edgeIsValid (edge) {
const { sourceId } = edge
const sourceTask = this.tasks.find((task) => task.code === sourceId)
if (sourceTask.taskType === 'CONDITIONS') {
const edges = this.getEdges()
return edges.filter((e) => e.sourceId === sourceTask.code).length <= 2
}
return true
},
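/*
* Sketch: a CONDITIONS node is limited to two outgoing edges (its success and
* failure branches). Once the filtered outgoing-edge count goes above 2,
* edgeIsValid returns false and validateConnection rejects the new link.
*/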
/**
* Gets the current selections
* @return {Cell[]}
*/
getSelections () {
return this.graph.getSelectedCells()
},
/**
* Lock scroller
*/
lockScroller () {
this.graph.lockScroller()
},
/**
* Unlock scroller
*/
unlockScroller () {
this.graph.unlockScroller()
},
/**
* set node status icon
* @param {number} code
* @param {string} state
*/
setNodeStatus ({ code, state, taskInstance }) {
code += ''
const stateProps = tasksState[state]
const node = this.graph.getCellById(code)
if (node) {
// Destroy the previous dom
node.removeMarkup()
node.setMarkup(NODE.markup.concat(NODE_STATUS_MARKUP))
const nodeView = this.graph.findViewByCell(node)
const el = nodeView.find('div')[0]
nodeStatus({
stateProps,
taskInstance
}).$mount(el)
}
},
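/*
* Usage sketch (illustrative values; valid state keys come from tasksState
* in ../config):
*   this.setNodeStatus({ code: 1001, state: 'RUNNING_EXECUTION', taskInstance })
*/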
/**
* Drag && Drop Event
*/
onDragStart (e, taskType) {
if (!this.editable) {
e.preventDefault()
return
}
this.dragging = {
x: e.offsetX,
y: e.offsetY,
type: taskType.name
}
},
onDrop (e) {
const { type } = this.dragging
const { x, y } = this.calcGraphCoordinate(e.clientX, e.clientY)
this.genTaskCodeList({
genNum: 1
})
.then((res) => {
const [code] = res
this.addNode(code, type, { x, y })
this.dagChart.openFormModel(code, type)
})
.catch((err) => {
console.error(err)
})
},
calcGraphCoordinate (mClientX, mClientY) {
// Distance from the mouse to the top-left corner of the container;
const { left: cX, top: cY } =
this.$refs.container.getBoundingClientRect()
const mouseX = mClientX - cX
const mouseY = mClientY - cY
// The distance that paper has been scrolled
const { left: sLeft, top: sTop } = this.graph.getScrollbarPosition()
const { left: oLeft, top: oTop } = this.originalScrollPosition
const scrollX = sLeft - oLeft
const scrollY = sTop - oTop
// Distance from the mouse to the top-left corner of the dragging element;
const { x: eX, y: eY } = this.dragging
return {
x: mouseX + scrollX - eX,
y: mouseY + scrollY - eY
}
},
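/*
* Worked example with hypothetical numbers — mouse at client (400, 300),
* container at (100, 50), paper scrolled +20/+10 since init, drag offset
* (30, 15):
*   x = (400 - 100) + 20 - 30 = 290
*   y = (300 - 50) + 10 - 15 = 245
*/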
/**
* Get prev nodes by code
* @param {number} code
* node1 -> node2 -> node3
* getPrevNodes(node2.code) => [node1]
*/
getPrevNodes (code) {
const nodes = this.getNodes()
const edges = this.getEdges()
const nodesMap = {}
nodes.forEach((node) => {
nodesMap[node.id] = node
})
return edges
.filter((edge) => edge.targetId === code)
.map((edge) => nodesMap[edge.sourceId])
},
/**
* set prev nodes
* @param {number} code
* @param {number[]} preNodeCodes
* @param {boolean} override If set to true, setPreNodes will delete all edges that end at the node and rebuild them
*/
setPreNodes (code, preNodeCodes, override) {
const edges = this.getEdges()
const currPreCodes = []
edges.forEach((edge) => {
if (edge.targetId === code) {
if (override) {
this.removeEdge(edge.id)
} else {
currPreCodes.push(edge.sourceId)
}
}
})
preNodeCodes.forEach((preCode) => {
if (currPreCodes.includes(preCode) || preCode === code) return
const edge = this.genEdgeJSON(preCode, code)
this.graph.addEdge(edge)
})
},
/**
* Get post nodes by code
* @param {number} code
* node1 -> node2 -> node3
* getPostNodes(node2.code) => [node3]
*/
getPostNodes (code) {
const nodes = this.getNodes()
const edges = this.getEdges()
const nodesMap = {}
nodes.forEach((node) => {
nodesMap[node.id] = node
})
return edges
.filter((edge) => edge.sourceId === code)
.map((edge) => nodesMap[edge.targetId])
},
/**
* set post nodes
* @param {number} code
* @param {number[]} postNodeCodes
* @param {boolean} override If set to true, setPostNodes will delete all edges that start from the node and rebuild them
*/
setPostNodes (code, postNodeCodes, override) {
const edges = this.getEdges()
const currPostCodes = []
edges.forEach((edge) => {
if (edge.sourceId === code) {
if (override) {
this.removeEdge(edge.id)
} else {
currPostCodes.push(edge.targetId)
}
}
})
postNodeCodes.forEach((postCode) => {
if (currPostCodes.includes(postCode) || postCode === code) return
const edge = this.genEdgeJSON(code, postCode)
this.graph.addEdge(edge)
})
},
/**
* Navigate to cell
* @param {string} taskName
*/
navigateTo (taskName) {
const nodes = this.getNodes()
nodes.forEach((node) => {
if (node.data.taskName === taskName) {
const id = node.id
const cell = this.graph.getCellById(id)
this.graph.scrollToCell(cell, { animation: { duration: 600 } })
this.graph.cleanSelection()
this.graph.select(cell)
}
})
},
/**
* Canvas scale
*/
scaleChange (val) {
this.graph.zoomTo(val)
}
}
}
</script>
<style lang="scss" scoped>
@import "./canvas";
</style>
<style lang="scss">
@import "./x6-style";
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,050 | [Bug] [UI] Forbidden tasks need to be represented by a forbidden icon | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![image](https://user-images.githubusercontent.com/95271106/149500761-899fb44d-430c-402e-8f09-83fdb90290a0.png)
### What you expected to happen
Forbidden (disabled) tasks should be displayed with a distinct forbidden icon in the DAG.
### How to reproduce
Mark a task as forbidden in a workflow definition and view the DAG; the forbidden node looks the same as a normal one.
### Anything else
_No response_
### Version
2.0.2
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8050 | https://github.com/apache/dolphinscheduler/pull/8332 | fc9a31f81375fff820a6a718b606b359073cd892 | 42aab0f47ad65909c7ca498d644642e61a377b29 | "2022-01-14T10:29:19Z" | java | "2022-02-10T10:39:04Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div :class="['dag-chart', fullScreen ? 'full-screen' : '']">
<dag-toolbar />
<dag-canvas ref="canvas" />
<el-drawer
:visible.sync="taskDrawer"
size=""
:with-header="false"
:wrapperClosable="false"
class="task-drawer"
>
<!-- fix the bug that Element-UI (2.13.2) auto-focuses on the first input -->
<div style="width: 0px; height: 0px; overflow: hidden">
<el-input type="text" />
</div>
<m-form-model
v-if="taskDrawer"
:nodeData="nodeData"
:project-code="projectCode"
@seeHistory="seeHistory"
@addTaskInfo="addTaskInfo"
@close="closeTaskDrawer"
@onSubProcess="toSubProcess"
:type="type"
></m-form-model>
</el-drawer>
<el-dialog
:title="$t('Set the DAG diagram name')"
:visible.sync="saveDialog"
width="auto"
>
<m-udp ref="mUdp" @onUdp="onSave" @close="cancelSave"></m-udp>
</el-dialog>
<el-dialog
:title="$t('Please set the parameters before starting')"
:visible.sync="startDialog"
width="auto"
>
<m-start
:startData="{ code: definitionCode, name: name }"
:startNodeList="startTaskName"
:sourceType="'contextmenu'"
@onUpdateStart="onUpdateStart"
@closeStart="closeStart"
></m-start>
</el-dialog>
<edge-edit-model ref="edgeEditModel" />
<el-drawer :visible.sync="versionDrawer" size="" :with-header="false">
<!-- fix the bug that Element-UI (2.13.2) auto-focuses on the first input -->
<div style="width: 0px; height: 0px; overflow: hidden">
<el-input type="text" />
</div>
<m-versions
:versionData="versionData"
:isInstance="type === 'instance'"
@mVersionSwitchProcessDefinitionVersion="switchProcessVersion"
@mVersionGetProcessDefinitionVersionsPage="getProcessVersions"
@mVersionDeleteProcessDefinitionVersion="deleteProcessVersion"
@closeVersion="closeVersion"
></m-versions>
</el-drawer>
<m-log
v-if="type === 'instance' && logDialog"
:item="logTaskInstance"
source='dag'
:task-instance-id="logTaskInstance.id"
@close="closeLogDialog"
></m-log>
</div>
</template>
<script>
import { debounce } from 'lodash'
import dagToolbar from './canvas/toolbar.vue'
import dagCanvas from './canvas/canvas.vue'
import mFormModel from '../_source/formModel/formModel.vue'
import { mapActions, mapState, mapMutations } from 'vuex'
import mUdp from '../_source/udp/udp.vue'
import mStart from '../../projects/pages/definition/pages/list/_source/start.vue'
import edgeEditModel from './canvas/edgeEditModel.vue'
import mVersions from '../../projects/pages/definition/pages/list/_source/versions.vue'
import mLog from './formModel/log.vue'
const DEFAULT_NODE_DATA = {
id: null,
taskType: '',
self: {},
instanceId: null
}
export default {
name: 'dag-chart',
components: {
dagCanvas,
dagToolbar,
mFormModel,
mUdp,
mStart,
edgeEditModel,
mVersions,
mLog
},
provide () {
return {
dagChart: this
}
},
inject: ['definitionDetails'],
props: {
type: String,
releaseState: String
},
data () {
return {
definitionCode: 0,
// full screen mode
fullScreen: false,
// whether the task config drawer is visible
taskDrawer: false,
nodeData: { ...DEFAULT_NODE_DATA },
// whether the save dialog is visible
saveDialog: false,
// whether the start dialog is visible
startDialog: false,
startTaskName: '',
// whether the version drawer is visible
versionDrawer: false,
versionData: {
processDefinition: {
id: null,
version: '',
releaseState: ''
},
processDefinitionVersions: [],
total: null,
pageNo: null,
pageSize: null
},
// the task status refresh timer
statusTimer: null,
// the process instance id
instanceId: -1,
// log dialog
logDialog: false,
logTaskInstance: null,
taskInstances: []
}
},
mounted () {
this.setIsEditDag(false)
if (this.type === 'instance') {
this.instanceId = this.$route.params.id
this.definitionCode = this.$route.query.code || this.code
} else if (this.type === 'definition') {
this.definitionCode = this.$route.params.code
}
// auto resize canvas
this.resizeDebounceFunc = debounce(this.canvasResize, 200)
window.addEventListener('resize', this.resizeDebounceFunc)
// init graph
this.$refs.canvas.graphInit(!this.isDetails)
// backfill graph with tasks, locations and connects
this.backfill()
// refresh task status
if (this.type === 'instance') {
this.refreshTaskStatus()
// status polling
this.statusTimer = setInterval(() => {
this.refreshTaskStatus()
}, 90000)
}
},
beforeDestroy () {
this.resetParams()
clearInterval(this.statusTimer)
window.removeEventListener('resize', this.resizeDebounceFunc)
},
computed: {
...mapState('dag', [
'tasks',
'locations',
'connects',
'name',
'isDetails',
'projectCode',
'version',
'code'
])
},
methods: {
...mapActions('dag', [
'saveDAGchart',
'updateInstance',
'updateDefinition',
'getTaskState',
'getStartCheck',
'genTaskCodeList',
'switchProcessDefinitionVersion',
'getProcessDefinitionVersionsPage',
'deleteProcessDefinitionVersion'
]),
...mapMutations('dag', [
'addTask',
'setConnects',
'resetParams',
'setIsEditDag',
'setName',
'setLocations',
'resetLocalParam',
'setDependResult'
]),
/**
* Resize the canvas when the container size changes
*/
canvasResize () {
const canvas = this.$refs.canvas
canvas && canvas.paperResize()
},
toggleFullScreen () {
this.fullScreen = !this.fullScreen
this.$nextTick(this.canvasResize)
},
/**
* Task Drawer
* @param {boolean} visible
*/
toggleTaskDrawer (visible) {
this.taskDrawer = visible
},
/**
* Set the current node data
*/
setNodeData (nodeData) {
// merge into a fresh object so the shared DEFAULT_NODE_DATA is not mutated
this.nodeData = Object.assign({}, DEFAULT_NODE_DATA, nodeData)
},
/**
* open form model
* @desc Edit task config
* @param {number} taskCode
* @param {string} taskType
*/
openFormModel (taskCode, taskType) {
this.setNodeData({
id: taskCode,
taskType: taskType
})
this.toggleTaskDrawer(true)
},
addTaskInfo ({ item }) {
this.addTask(item)
this.$refs.canvas.setNodeName(item.code, item.name)
this.taskDrawer = false
},
closeTaskDrawer ({ flag }) {
if (flag) {
const canvas = this.$refs.canvas
canvas.removeNode(this.nodeData.id)
}
this.taskDrawer = false
},
/**
* Save dialog
*/
toggleSaveDialog (value) {
this.saveDialog = value
if (value) {
this.$nextTick(() => {
this.$refs.mUdp.reloadParam()
})
}
},
onSave (sourceType) {
this.toggleSaveDialog(false)
return new Promise((resolve, reject) => {
let tasks = this.tasks || []
const edges = this.$refs.canvas.getEdges()
const nodes = this.$refs.canvas.getNodes()
if (!nodes.length) {
reject(this.$t('Failed to create node to save'))
// bail out; without this return, resolve would still run below
return
}
const connects = this.buildConnects(edges, tasks)
this.setConnects(connects)
const locations = nodes.map((node) => {
return {
taskCode: node.id,
x: node.position.x,
y: node.position.y
}
})
this.setLocations(locations)
resolve({
connects: connects,
tasks: tasks,
locations: locations
})
})
.then((res) => {
if (this.verifyConditions(res.tasks)) {
this.loading(true)
const isEdit = !!this.definitionCode
if (isEdit) {
const methodName = this.type === 'instance' ? 'updateInstance' : 'updateDefinition'
const methodParam = this.type === 'instance' ? this.instanceId : this.definitionCode
// Edit
return this[methodName](methodParam)
.then((res) => {
this.$message({
message: res.msg,
type: 'success',
offset: 80
})
if (this.type === 'instance') {
this.$router.push({
path: `/projects/${this.projectCode}/instance/list/${methodParam}`
})
} else {
this.$router.push({
path: `/projects/${this.projectCode}/definition/list/${methodParam}`
})
}
})
.catch((e) => {
this.$message.error(e.msg || '')
})
.finally((e) => {
this.loading(false)
})
} else {
// Create
return this.saveDAGchart()
.then((res) => {
this.$message.success(res.msg)
// source @/conf/home/pages/dag/_source/editAffirmModel/index.js
if (sourceType !== 'affirm') {
// Jump process definition
this.$router.push({ name: 'projects-definition-list' })
}
})
.catch((e) => {
this.setName('')
this.$message.error(e.msg || '')
})
.finally((e) => {
this.loading(false)
})
}
}
})
.catch((err) => {
let msg = typeof err === 'string' ? err : err.msg || ''
this.$message.error(msg)
})
},
verifyConditions (value) {
let tasks = value
let bool = true
tasks.forEach((v) => {
if (
v.taskType === 'CONDITIONS' &&
(v.taskParams.conditionResult.successNode[0] === '' ||
v.taskParams.conditionResult.successNode[0] === null ||
v.taskParams.conditionResult.failedNode[0] === '' ||
v.taskParams.conditionResult.failedNode[0] === null)
) {
bool = false
}
})
if (!bool) {
this.$message.warning(
`${this.$t(
'Successful branch flow and failed branch flow are required'
)}`
)
return false
}
return true
},
cancelSave () {
this.toggleSaveDialog(false)
},
/**
* build graph json
*/
buildGraphJSON (tasks, locations, connects) {
const nodes = []
const edges = []
if (!locations) { locations = [] }
tasks.forEach((task) => {
const location = locations.find((l) => l.taskCode === task.code) || {}
const node = this.$refs.canvas.genNodeJSON(
task.code,
task.taskType,
task.name,
{
x: location.x,
y: location.y
}
)
nodes.push(node)
})
connects
.filter((r) => !!r.preTaskCode)
.forEach((c) => {
const edge = this.$refs.canvas.genEdgeJSON(
c.preTaskCode,
c.postTaskCode,
c.name
)
edges.push(edge)
})
return {
nodes,
edges
}
},
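/*
* Illustrative sketch of buildGraphJSON (hypothetical task code 1001, not
* taken from a real workflow):
*   tasks:     [{ code: 1001, taskType: 'SHELL', name: 'demo' }]
*   locations: [{ taskCode: 1001, x: 120, y: 80 }]
*   connects:  [{ preTaskCode: 0, postTaskCode: 1001, name: '' }]
* => { nodes: [<node JSON for 1001 at (120, 80)>], edges: [] }
* The head connect (preTaskCode 0) is filtered out, so no edge is generated.
*/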
/**
* Build connects by edges and tasks
* @param {Edge[]} edges
* @param {Task[]} tasks
* @returns
*/
buildConnects (edges, tasks) {
const preTaskMap = {}
const tasksMap = {}
edges.forEach((edge) => {
preTaskMap[edge.targetId] = {
sourceId: edge.sourceId,
edgeLabel: edge.label || ''
}
})
tasks.forEach((task) => {
tasksMap[task.code] = task
})
const headEdges = tasks
.filter((task) => !preTaskMap[task.code])
.map((task) => {
return {
name: '',
preTaskCode: 0,
preTaskVersion: 0,
postTaskCode: task.code,
postTaskVersion: task.version || 0,
// conditionType and conditionParams are reserved
conditionType: 0,
conditionParams: {}
}
})
return edges
.map((edge) => {
return {
name: edge.label,
preTaskCode: edge.sourceId,
preTaskVersion: tasksMap[edge.sourceId].version || 0,
postTaskCode: edge.targetId,
postTaskVersion: tasksMap[edge.targetId].version || 0,
// conditionType and conditionParams are reserved
conditionType: 0,
conditionParams: {}
}
})
.concat(headEdges)
},
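/*
* Illustrative sketch of buildConnects (hypothetical task codes):
*   edges: [{ sourceId: 1001, targetId: 1002, label: '' }]
*   tasks: [{ code: 1001, version: 1 }, { code: 1002, version: 1 }]
* =>
*   [{ name: '', preTaskCode: 1001, preTaskVersion: 1, postTaskCode: 1002,
*      postTaskVersion: 1, conditionType: 0, conditionParams: {} },
*    { name: '', preTaskCode: 0, preTaskVersion: 0, postTaskCode: 1001,
*      postTaskVersion: 1, conditionType: 0, conditionParams: {} }]
* The second entry is the generated "head edge" for task 1001, which has no
* predecessor.
*/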
backfill () {
const tasks = this.tasks
const locations = this.locations
const connects = this.connects
const json = this.buildGraphJSON(tasks, locations, connects)
this.$refs.canvas.fromJSON(json)
// Auto format
if (!locations) {
this.$refs.canvas.format()
}
},
/**
* Return to the previous process
*/
returnToPrevProcess () {
let $name = this.$route.name.split('-')
let subs = this.$route.query.subs
let ids = subs.split(',')
const last = ids.pop()
this.$router.push({
path: `/${$name[0]}/${this.projectCode}/${$name[1]}/list/${last}`,
query: ids.length > 0 ? { subs: ids.join(',') } : null
})
},
toSubProcess ({ subProcessCode, subInstanceId }) {
const tarIdentifier =
this.type === 'instance' ? subInstanceId : subProcessCode
const curIdentifier =
this.type === 'instance' ? this.instanceId : this.definitionCode
let subs = []
let olds = this.$route.query.subs
if (olds) {
subs = olds.split(',')
subs.push(curIdentifier)
} else {
subs.push(curIdentifier)
}
let $name = this.$route.name.split('-')
this.$router.push({
path: `/${$name[0]}/${this.projectCode}/${$name[1]}/list/${tarIdentifier}`,
query: { subs: subs.join(',') }
})
},
seeHistory (taskName) {
this.$router.push({
name: 'task-instance',
query: {
processInstanceId: this.instanceId,
taskName: taskName
}
})
},
/**
* Start dialog
*/
startRunning (taskName) {
this.startTaskName = taskName
this.getStartCheck({ processDefinitionCode: this.definitionCode }).then(
(res) => {
this.startDialog = true
}
)
},
onUpdateStart () {
this.startDialog = false
},
closeStart () {
this.startDialog = false
},
/**
* Task status
*/
refreshTaskStatus () {
const instanceId = this.$route.params.id
this.loading(true)
this.getTaskState(instanceId)
.then((res) => {
this.$message(this.$t('Refresh status succeeded'))
const { taskList } = res.data
const list = res.list
if (taskList) {
this.taskInstances = taskList
taskList.forEach((taskInstance) => {
this.$refs.canvas.setNodeStatus({
code: taskInstance.taskCode,
state: taskInstance.state,
taskInstance
})
})
}
if (list) {
list.forEach((dependent) => {
if (dependent.dependentResult) {
this.setDependResult(JSON.parse(dependent.dependentResult))
}
})
}
})
.finally(() => {
this.loading(false)
})
},
/**
* Loading
* @param {boolean} visible
*/
loading (visible) {
if (visible) {
this.spinner = this.$loading({
lock: true,
text: this.$t('Loading...'),
spinner: 'el-icon-loading',
background: 'rgba(0, 0, 0, 0.4)',
customClass: 'dag-fullscreen-loading'
})
} else {
this.spinner && this.spinner.close()
}
},
/**
* change process definition version
*/
showVersions () {
this.getProcessDefinitionVersionsPage({
pageNo: 1,
pageSize: 10,
code: this.definitionCode
})
.then((res) => {
let processDefinitionVersions = res.data.totalList
let total = res.data.total
let pageSize = res.data.pageSize
let pageNo = res.data.currentPage
// this.versionData.processDefinition.id = this.urlParam.id
this.versionData.processDefinition.code = this.definitionCode
this.versionData.processDefinition.version = this.version
this.versionData.processDefinition.releaseState = this.releaseState
this.versionData.processDefinitionVersions =
processDefinitionVersions
this.versionData.total = total
this.versionData.pageNo = pageNo
this.versionData.pageSize = pageSize
this.versionDrawer = true
})
.catch((e) => {
this.$message.error(e.msg || '')
})
},
closeVersion () {
this.versionDrawer = false
},
switchProcessVersion ({ version, processDefinitionCode }) {
this.switchProcessDefinitionVersion({
version: version,
code: processDefinitionCode
})
.then((res) => {
this.$message.success(this.$t('Switch Version Successfully'))
this.closeVersion()
this.definitionDetails.init()
})
.catch((e) => {
this.$message.error(e.msg || '')
})
},
getProcessVersions ({ pageNo, pageSize, processDefinitionCode }) {
this.getProcessDefinitionVersionsPage({
pageNo: pageNo,
pageSize: pageSize,
code: processDefinitionCode
})
.then((res) => {
this.versionData.processDefinitionVersions = res.data.totalList
this.versionData.total = res.data.total
this.versionData.pageSize = res.data.pageSize
this.versionData.pageNo = res.data.currentPage
})
.catch((e) => {
this.$message.error(e.msg || '')
})
},
deleteProcessVersion ({ version, processDefinitionCode }) {
this.deleteProcessDefinitionVersion({
version: version,
code: processDefinitionCode
})
.then((res) => {
this.$message.success(res.msg || '')
this.getProcessVersions({
pageNo: 1,
pageSize: 10,
processDefinitionCode: processDefinitionCode
})
})
.catch((e) => {
this.$message.error(e.msg || '')
})
},
/**
* Log dialog
*/
closeLogDialog () {
this.logDialog = false
this.logTaskInstance = null
},
showLogDialog (taskDefinitionCode) {
const taskInstance = this.taskInstances.find(taskInstance => {
return taskInstance.taskCode === taskDefinitionCode
})
if (taskInstance) {
this.logTaskInstance = {
id: taskInstance.id,
type: taskInstance.taskType
}
this.logDialog = true
}
}
}
}
</script>
<style lang="scss" scoped>
@import "./dag";
</style>
<style lang="scss">
@import "./loading";
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,335 | [Bug] [UI] Sqoop node change data source type to hive error | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![](https://vip1.loli.io/2022/02/10/Y4bivoBSDHt2cIe.png)
### What you expected to happen
The data source type should switch to HIVE successfully, without an error.
### How to reproduce
Open a Sqoop task node and change the data source type to HIVE; an error pops up (see screenshot above).
### Anything else
_No response_
### Version
2.0.3
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8335 | https://github.com/apache/dolphinscheduler/pull/8348 | 13d9031b687dc51bb8dc424cbe02053807c081fb | 0f04b3cfaaafbb5faeb1c48328ed60c49e17977f | "2022-02-10T10:02:58Z" | java | "2022-02-11T08:49:51Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sqoop.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="sqoop-model">
<m-list-box>
<div slot="text">{{$t('Custom Job')}}</div>
<div slot="content">
<el-switch size="small" v-model="isCustomTask" @change="_onSwitch" :disabled="isDetails"></el-switch>
</div>
</m-list-box>
<m-list-box v-show="isCustomTask">
<div slot="text">{{$t('Custom Script')}}</div>
<div slot="content">
<div class="form-mirror">
<textarea id="code-shell-mirror" name="code-shell-mirror" style="opacity: 0;"></textarea>
</div>
</div>
</m-list-box>
<template v-if="!isCustomTask">
<m-list-box>
<div slot="text">{{$t('Sqoop Job Name')}}</div>
<div slot="content">
<el-input :disabled="isDetails" size="small" type="text" v-model="jobName" :placeholder="$t('Please enter Job Name(required)')"></el-input>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('Direct')}}</div>
<div slot="content">
<el-select
style="width: 130px;"
size="small"
v-model="modelType"
:disabled="isDetails"
@change="_handleModelTypeChange">
<el-option
v-for="city in modelTypeList"
:key="city.code"
:value="city.code"
:label="city.code">
</el-option>
</el-select>
</div>
</m-list-box>
<m-list-box>
<div slot="text" style="width: 110px;">{{$t('Hadoop Custom Params')}}</div>
<div slot="content">
<m-local-params
ref="refMapColumnHadoopParams"
@on-local-params="_onHadoopCustomParams"
:udp-list="hadoopCustomParams"
:hide="false">
</m-local-params>
</div>
</m-list-box>
<m-list-box>
<div slot="text" style="width: 100px;">{{$t('Sqoop Advanced Parameters')}}</div>
<div slot="content">
<m-local-params
ref="refMapColumnAdvancedParams"
@on-local-params="_onSqoopAdvancedParams"
:udp-list="sqoopAdvancedParams"
:hide="false">
</m-local-params>
</div>
</m-list-box>
<m-list-box>
<div slot="text" style="font-weight:bold">{{$t('Data Source')}}</div>
</m-list-box>
<hr style="margin-left: 60px;">
<m-list-box>
<div slot="text">{{$t('Type')}}</div>
<div slot="content">
<el-select
style="width: 130px;"
size="small"
v-model="sourceType"
:disabled="isDetails"
@change="_handleSourceTypeChange">
<el-option
v-for="city in sourceTypeList"
:key="city.code"
:value="city.code"
:label="city.code">
</el-option>
</el-select>
</div>
</m-list-box>
<template v-if="sourceType === 'MYSQL'">
<m-list-box>
<div slot="text">{{$t('Datasource')}}</div>
<div slot="content">
<m-datasource
ref="refSourceDs"
@on-dsData="_onSourceDsData"
:data="{type:sourceMysqlParams.srcType,
typeList: [{id: 0, code: 'MYSQL', disabled: false}],
datasource:sourceMysqlParams.srcDatasource }"
>
</m-datasource>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('ModelType')}}</div>
<div slot="content">
<el-radio-group v-model="srcQueryType" size="small" @change="_handleQueryType">
<el-radio label="0">{{$t('Form')}}</el-radio>
<el-radio label="1">SQL</el-radio>
</el-radio-group>
</div>
</m-list-box>
<template v-if="sourceMysqlParams.srcQueryType === '0'">
<m-list-box>
<div slot="text">{{$t('Table')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="sourceMysqlParams.srcTable"
:placeholder="$t('Please enter Mysql Table(required)')">
</el-input>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('ColumnType')}}</div>
<div slot="content">
<el-radio-group v-model="sourceMysqlParams.srcColumnType" size="small" style="vertical-align: sub;">
<el-radio label="0">{{$t('All Columns')}}</el-radio>
<el-radio label="1">{{$t('Some Columns')}}</el-radio>
</el-radio-group>
</div>
</m-list-box>
<m-list-box v-if="sourceMysqlParams.srcColumnType === '1'">
<div slot="text">{{$t('Column')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="sourceMysqlParams.srcColumns"
:placeholder="$t('Please enter Columns (Comma separated)')">
</el-input>
</div>
</m-list-box>
</template>
</template>
<template v-if="sourceType === 'HIVE'">
<m-list-box>
<div slot="text">{{$t('Database')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="sourceHiveParams.hiveDatabase"
:placeholder="$t('Please enter Hive Database(required)')">
</el-input>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('Table')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="sourceHiveParams.hiveTable"
:placeholder="$t('Please enter Hive Table(required)')">
</el-input>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('Hive partition Keys')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="sourceHiveParams.hivePartitionKey"
:placeholder="$t('Please enter Hive Partition Keys')">
</el-input>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('Hive partition Values')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="sourceHiveParams.hivePartitionValue"
:placeholder="$t('Please enter Hive Partition Values')">
</el-input>
</div>
</m-list-box>
</template>
<template v-if="sourceType === 'HDFS'">
<m-list-box>
<div slot="text">{{$t('Export Dir')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="sourceHdfsParams.exportDir"
:placeholder="$t('Please enter Export Dir(required)')">
</el-input>
</div>
</m-list-box>
</template>
<template v-if="sourceType === 'MYSQL'">
<m-list-box v-show="srcQueryType === '1'">
<div slot="text">{{$t('SQL Statement')}}</div>
<div slot="content">
<div class="form-mirror">
<textarea
id="code-sqoop-mirror"
name="code-sqoop-mirror"
style="opacity: 0;">
</textarea>
<a class="ans-modal-box-max">
<em class="el-icon-full-screen" @click="setEditorVal"></em>
</a>
</div>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('Map Column Hive')}}</div>
<div slot="content">
<m-local-params
ref="refMapColumnHiveParams"
@on-local-params="_onMapColumnHive"
:udp-list="sourceMysqlParams.mapColumnHive"
:hide="false">
</m-local-params>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('Map Column Java')}}</div>
<div slot="content">
<m-local-params
ref="refMapColumnJavaParams"
@on-local-params="_onMapColumnJava"
:udp-list="sourceMysqlParams.mapColumnJava"
:hide="false">
</m-local-params>
</div>
</m-list-box>
</template>
<m-list-box>
<div slot="text" style="font-weight:bold">{{$t('Data Target')}}</div>
</m-list-box>
<hr style="margin-left: 60px;">
<m-list-box>
<div slot="text">{{$t('Type')}}</div>
<div slot="content">
<el-select
style="width: 130px;"
size="small"
v-model="targetType"
:disabled="isDetails">
<el-option
v-for="city in targetTypeList"
:key="city.code"
:value="city.code"
:label="city.code">
</el-option>
</el-select>
</div>
</m-list-box>
<template v-if="targetType === 'HIVE'">
<m-list-box>
<div slot="text">{{$t('Database')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="targetHiveParams.hiveDatabase"
:placeholder="$t('Please enter Hive Database(required)')">
</el-input>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('Table')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="targetHiveParams.hiveTable"
:placeholder="$t('Please enter Hive Table(required)')">
</el-input>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('CreateHiveTable')}}</div>
<div slot="content">
<el-switch v-model="targetHiveParams.createHiveTable" size="small"></el-switch>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('DropDelimiter')}}</div>
<div slot="content">
<el-switch v-model="targetHiveParams.dropDelimiter" size="small"></el-switch>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('OverWriteSrc')}}</div>
<div slot="content">
<el-switch v-model="targetHiveParams.hiveOverWrite" size="small"></el-switch>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('Hive Target Dir')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="targetHiveParams.hiveTargetDir"
:placeholder="$t('Please enter hive target dir')">
</el-input>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('ReplaceDelimiter')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="targetHiveParams.replaceDelimiter"
:placeholder="$t('Please enter Replace Delimiter')">
</el-input>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('Hive partition Keys')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="targetHiveParams.hivePartitionKey"
:placeholder="$t('Please enter Hive Partition Keys')">
</el-input>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('Hive partition Values')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="targetHiveParams.hivePartitionValue"
:placeholder="$t('Please enter Hive Partition Values')">
</el-input>
</div>
</m-list-box>
</template>
<template v-if="targetType === 'HDFS'">
<m-list-box>
<div slot="text">{{$t('Target Dir')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="targetHdfsParams.targetPath"
:placeholder="$t('Please enter Target Dir(required)')">
</el-input>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('DeleteTargetDir')}}</div>
<div slot="content">
<el-switch v-model="targetHdfsParams.deleteTargetDir" size="small"></el-switch>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('CompressionCodec')}}</div>
<div slot="content">
<el-radio-group v-model="targetHdfsParams.compressionCodec" size="small">
<el-radio label="snappy">snappy</el-radio>
<el-radio label="lzo">lzo</el-radio>
<el-radio label="gzip">gzip</el-radio>
<el-radio label="">no</el-radio>
</el-radio-group>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('FileType')}}</div>
<div slot="content">
<el-radio-group v-model="targetHdfsParams.fileType" size="small">
<el-radio label="--as-avrodatafile">avro</el-radio>
<el-radio label="--as-sequencefile">sequence</el-radio>
<el-radio label="--as-textfile">text</el-radio>
<el-radio label="--as-parquetfile">parquet</el-radio>
</el-radio-group>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('FieldsTerminated')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="targetHdfsParams.fieldsTerminated"
:placeholder="$t('Please enter Fields Terminated')">
</el-input>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('LinesTerminated')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="targetHdfsParams.linesTerminated"
:placeholder="$t('Please enter Lines Terminated')">
</el-input>
</div>
</m-list-box>
</template>
<template v-if="targetType === 'MYSQL'">
<m-list-box>
<div slot="text">{{$t('Datasource')}}</div>
<div slot="content">
<m-datasource
ref="refTargetDs"
@on-dsData="_onTargetDsData"
:data="{ type:targetMysqlParams.targetType,
typeList: [{id: 0, code: 'MYSQL', disabled: false}],
datasource:targetMysqlParams.targetDatasource }"
>
</m-datasource>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('Table')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="targetMysqlParams.targetTable"
:placeholder="$t('Please enter Mysql Table(required)')">
</el-input>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('Column')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="targetMysqlParams.targetColumns"
:placeholder="$t('Please enter Columns (Comma separated)')">
</el-input>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('FieldsTerminated')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="targetMysqlParams.fieldsTerminated"
:placeholder="$t('Please enter Fields Terminated')">
</el-input>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('LinesTerminated')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="targetMysqlParams.linesTerminated"
:placeholder="$t('Please enter Lines Terminated')">
</el-input>
</div>
</m-list-box>
<m-list-box>
<div slot="text">{{$t('IsUpdate')}}</div>
<div slot="content">
<el-switch v-model="targetMysqlParams.isUpdate" size="small"></el-switch>
</div>
</m-list-box>
<m-list-box v-show="targetMysqlParams.isUpdate">
<div slot="text">{{$t('UpdateKey')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="targetMysqlParams.targetUpdateKey"
:placeholder="$t('Please enter Update Key')">
</el-input>
</div>
</m-list-box>
<m-list-box v-show="targetMysqlParams.isUpdate">
<div slot="text">{{$t('UpdateMode')}}</div>
<div slot="content">
<el-radio-group v-model="targetMysqlParams.targetUpdateMode" size="small">
<el-radio label="updateonly">{{$t('OnlyUpdate')}}</el-radio>
<el-radio label="allowinsert">{{$t('AllowInsert')}}</el-radio>
</el-radio-group>
</div>
</m-list-box>
</template>
<m-list-box>
<div slot="text">{{$t('Concurrency')}}</div>
<div slot="content">
<el-input
:disabled="isDetails"
type="text"
size="small"
v-model="concurrency"
:placeholder="$t('Please enter Concurrency')">
</el-input>
</div>
</m-list-box>
</template>
<m-list-box>
<div slot="text">{{$t('Custom Parameters')}}</div>
<div slot="content">
<m-local-params
ref="refLocalParams"
@on-local-params="_onLocalParams"
:udp-list="localParams"
:hide="false">
</m-local-params>
</div>
</m-list-box>
<el-dialog
:visible.sync="scriptBoxDialog"
append-to-body="true"
width="80%">
<m-script-box :item="item" @getSriptBoxValue="getSriptBoxValue" @closeAble="closeAble"></m-script-box>
</el-dialog>
</div>
</template>
<script>
import _ from 'lodash'
import i18n from '@/module/i18n'
import mListBox from './_source/listBox'
import mScriptBox from './_source/scriptBox'
import mDatasource from './_source/datasource'
import mLocalParams from './_source/localParams'
import disabledState from '@/module/mixin/disabledState'
import codemirror from '@/conf/home/pages/resource/pages/file/pages/_source/codemirror'
let editor
let shellEditor
export default {
  name: 'sqoop',
data () {
return {
/**
* Is Custom Task
*/
isCustomTask: false,
/**
* Customer Params
*/
localParams: [],
/**
* Hadoop Custom Params
*/
hadoopCustomParams: [],
/**
* Sqoop Advanced Params
*/
sqoopAdvancedParams: [],
/**
* script
*/
customShell: '',
/**
* task name
*/
jobName: '',
/**
* mysql query type
*/
srcQueryType: '1',
/**
* source data source
*/
srcDatasource: '',
/**
* target data source
*/
targetDatasource: '',
/**
* concurrency
*/
concurrency: 1,
/**
* default job type
*/
jobType: 'TEMPLATE',
      /**
       * model type: data transfer direction (import/export)
       */
modelType: 'import',
modelTypeList: [{ code: 'import' }, { code: 'export' }],
sourceTypeList: [
{
code: 'MYSQL'
}
],
targetTypeList: [
{
code: 'HIVE'
},
{
code: 'HDFS'
}
],
sourceType: 'MYSQL',
targetType: 'HDFS',
sourceMysqlParams: {
srcType: 'MYSQL',
srcDatasource: '',
srcTable: '',
srcQueryType: '1',
srcQuerySql: '',
srcColumnType: '0',
srcColumns: '',
srcConditionList: [],
mapColumnHive: [],
mapColumnJava: []
},
sourceHdfsParams: {
exportDir: ''
},
sourceHiveParams: {
hiveDatabase: '',
hiveTable: '',
hivePartitionKey: '',
hivePartitionValue: ''
},
targetHdfsParams: {
targetPath: '',
deleteTargetDir: true,
fileType: '--as-avrodatafile',
compressionCodec: 'snappy',
fieldsTerminated: '',
linesTerminated: ''
},
targetMysqlParams: {
targetType: 'MYSQL',
targetDatasource: '',
targetTable: '',
targetColumns: '',
fieldsTerminated: '',
linesTerminated: '',
preQuery: '',
isUpdate: false,
targetUpdateKey: '',
targetUpdateMode: 'allowinsert'
},
targetHiveParams: {
hiveDatabase: '',
hiveTable: '',
createHiveTable: false,
dropDelimiter: false,
hiveOverWrite: true,
replaceDelimiter: '',
hiveTargetDir: '',
hivePartitionKey: '',
hivePartitionValue: ''
},
item: '',
scriptBoxDialog: false
}
},
mixins: [disabledState],
props: {
backfillItem: Object
},
methods: {
setEditorVal () {
this.item = editor.getValue()
this.scriptBoxDialog = true
},
getSriptBoxValue (val) {
editor.setValue(val)
},
_onSwitch (is) {
if (is) {
this.jobType = 'CUSTOM'
this.isCustomTask = true
setTimeout(() => {
this._handlerShellEditor()
}, 200)
} else {
this.jobType = 'TEMPLATE'
this.isCustomTask = false
if (this.srcQueryType === '1') {
setTimeout(() => {
this._handlerEditor()
}, 200)
}
}
},
_handleQueryType (o) {
this.sourceMysqlParams.srcQueryType = this.srcQueryType
this._getTargetTypeList(this.sourceType)
this.targetType = this.targetTypeList[0].code
if (this.srcQueryType === '1') {
setTimeout(() => {
this._handlerEditor()
}, 200)
}
},
_handleModelTypeChange (a) {
this._getSourceTypeList(a)
this.sourceType = this.sourceTypeList[0].code
this._handleSourceTypeChange({ label: this.sourceType, value: this.sourceType })
},
_handleSourceTypeChange (a) {
this._getTargetTypeList(a.label)
this.targetType = this.targetTypeList[0].code
},
_getSourceTypeList (data) {
switch (data) {
case 'import':
this.sourceTypeList = [
{
code: 'MYSQL'
}
]
break
case 'export':
this.sourceTypeList = [
{
code: 'HDFS'
},
{
code: 'HIVE'
}
]
break
default:
this.sourceTypeList = [
{
code: 'MYSQL'
},
{
code: 'HIVE'
},
{
code: 'HDFS'
}
]
break
}
},
_getTargetTypeList (data) {
switch (data) {
case 'MYSQL':
if (this.srcQueryType === '1') {
this.targetTypeList = [
{
code: 'HDFS'
}]
} else {
this.targetTypeList = [
{
code: 'HIVE'
},
{
code: 'HDFS'
}
]
}
break
case 'HDFS':
this.targetTypeList = [
{
code: 'MYSQL'
}
]
break
case 'HIVE':
this.targetTypeList = [
{
code: 'MYSQL'
}
]
break
default:
this.targetTypeList = [
{
code: 'HIVE'
},
{
code: 'HDFS'
}
]
break
}
},
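    // A quick reference for the switch above (derived from the code, not an
    // official matrix):
    //   MYSQL source with custom SQL (srcQueryType === '1') -> HDFS only
    //   MYSQL source in table mode                          -> HIVE or HDFS
    //   HDFS or HIVE source                                 -> MYSQL only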
_onMapColumnHive (a) {
this.sourceMysqlParams.mapColumnHive = a
},
_onMapColumnJava (a) {
this.sourceMysqlParams.mapColumnJava = a
},
/**
     * Receive the selected source datasource
*/
_onSourceDsData (o) {
this.sourceMysqlParams.srcType = o.type
this.sourceMysqlParams.srcDatasource = o.datasource
},
/**
     * Receive the selected target datasource
*/
_onTargetDsData (o) {
this.targetMysqlParams.targetType = o.type
this.targetMysqlParams.targetDatasource = o.datasource
},
/**
* stringify the source params
*/
_handleSourceParams () {
let params = null
switch (this.sourceType) {
case 'MYSQL':
this.sourceMysqlParams.srcQuerySql = this.sourceMysqlParams.srcQueryType === '1' && editor
? editor.getValue() : this.sourceMysqlParams.srcQuerySql
params = JSON.stringify(this.sourceMysqlParams)
break
case 'ORACLE':
params = JSON.stringify(this.sourceOracleParams)
break
case 'HDFS':
params = JSON.stringify(this.sourceHdfsParams)
break
case 'HIVE':
params = JSON.stringify(this.sourceHiveParams)
break
default:
params = ''
break
}
return params
},
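    // Illustrative output for a MYSQL source in table mode (values are
    // hypothetical):
    //   '{"srcType":"MYSQL","srcDatasource":"1","srcTable":"t_user",
    //     "srcQueryType":"0","srcColumnType":"0","srcColumns":"", ...}'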
/**
* stringify the target params
*/
_handleTargetParams () {
let params = null
switch (this.targetType) {
case 'HIVE':
params = JSON.stringify(this.targetHiveParams)
break
case 'HDFS':
params = JSON.stringify(this.targetHdfsParams)
break
case 'MYSQL':
params = JSON.stringify(this.targetMysqlParams)
break
default:
params = ''
break
}
return params
},
/**
* get source params by source type
*/
_getSourceParams (data) {
switch (this.sourceType) {
case 'MYSQL':
this.sourceMysqlParams = JSON.parse(data)
this.srcDatasource = this.sourceMysqlParams.srcDatasource
break
case 'ORACLE':
this.sourceOracleParams = JSON.parse(data)
break
case 'HDFS':
this.sourceHdfsParams = JSON.parse(data)
break
case 'HIVE':
this.sourceHiveParams = JSON.parse(data)
break
default:
break
}
},
/**
* get target params by target type
*/
_getTargetParams (data) {
switch (this.targetType) {
case 'HIVE':
this.targetHiveParams = JSON.parse(data)
break
case 'HDFS':
this.targetHdfsParams = JSON.parse(data)
break
case 'MYSQL':
this.targetMysqlParams = JSON.parse(data)
this.targetDatasource = this.targetMysqlParams.targetDatasource
break
default:
break
}
},
/**
* verification
*/
_verification () {
// localParams Subcomponent verification
if (!this.$refs.refLocalParams._verifProp()) {
return false
}
let sqoopParams = {
jobType: this.jobType,
localParams: this.localParams
}
if (this.jobType === 'CUSTOM') {
if (!shellEditor.getValue()) {
this.$message.warning(`${i18n.$t('Please enter Custom Shell(required)')}`)
return false
}
sqoopParams.customShell = shellEditor.getValue()
} else {
if (!this.jobName) {
this.$message.warning(`${i18n.$t('Please enter Job Name(required)')}`)
return false
}
switch (this.sourceType) {
case 'MYSQL':
if (!this.$refs.refSourceDs._verifDatasource()) {
return false
}
if (this.srcQueryType === '1') {
if (!editor.getValue()) {
this.$message.warning(`${i18n.$t('Please enter a SQL Statement(required)')}`)
return false
}
this.sourceMysqlParams.srcTable = ''
this.sourceMysqlParams.srcColumnType = '0'
this.sourceMysqlParams.srcColumns = ''
} else {
if (this.sourceMysqlParams.srcTable === '') {
this.$message.warning(`${i18n.$t('Please enter Mysql Table(required)')}`)
return false
}
this.sourceMysqlParams.srcQuerySql = ''
if (this.sourceMysqlParams.srcColumnType === '1' && this.sourceMysqlParams.srcColumns === '') {
this.$message.warning(`${i18n.$t('Please enter Columns (Comma separated)')}`)
return false
}
if (this.sourceMysqlParams.srcColumnType === '0') {
this.sourceMysqlParams.srcColumns = ''
}
}
break
case 'HDFS':
if (this.sourceHdfsParams.exportDir === '') {
this.$message.warning(`${i18n.$t('Please enter Export Dir(required)')}`)
return false
}
break
case 'HIVE':
if (this.sourceHiveParams.hiveDatabase === '') {
this.$message.warning(`${i18n.$t('Please enter Hive Database(required)')}`)
return false
}
if (this.sourceHiveParams.hiveTable === '') {
this.$message.warning(`${i18n.$t('Please enter Hive Table(required)')}`)
return false
}
break
default:
break
}
switch (this.targetType) {
case 'HIVE':
if (this.targetHiveParams.hiveDatabase === '') {
this.$message.warning(`${i18n.$t('Please enter Hive Database(required)')}`)
return false
}
if (this.targetHiveParams.hiveTable === '') {
this.$message.warning(`${i18n.$t('Please enter Hive Table(required)')}`)
return false
}
break
case 'HDFS':
if (this.targetHdfsParams.targetPath === '') {
this.$message.warning(`${i18n.$t('Please enter Target Dir(required)')}`)
return false
}
break
case 'MYSQL':
if (!this.$refs.refTargetDs._verifDatasource()) {
return false
}
if (this.targetMysqlParams.targetTable === '') {
this.$message.warning(`${i18n.$t('Please enter Mysql Table(required)')}`)
return false
}
break
default:
break
}
sqoopParams.jobName = this.jobName
sqoopParams.hadoopCustomParams = this.hadoopCustomParams
sqoopParams.sqoopAdvancedParams = this.sqoopAdvancedParams
sqoopParams.concurrency = this.concurrency
sqoopParams.modelType = this.modelType
sqoopParams.sourceType = this.sourceType
sqoopParams.targetType = this.targetType
sqoopParams.targetParams = this._handleTargetParams()
sqoopParams.sourceParams = this._handleSourceParams()
}
      // emit the assembled params to the parent
this.$emit('on-params', sqoopParams)
return true
},
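    // On success the emitted sqoopParams object is either
    //   { jobType: 'CUSTOM', localParams, customShell }
    // or
    //   { jobType: 'TEMPLATE', localParams, jobName, hadoopCustomParams,
    //     sqoopAdvancedParams, concurrency, modelType, sourceType, targetType,
    //     sourceParams, targetParams }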
/**
* Processing code highlighting
*/
_handlerEditor () {
this._destroyEditor()
editor = codemirror('code-sqoop-mirror', {
mode: 'sql',
readOnly: this.isDetails
})
this.keypress = () => {
if (!editor.getOption('readOnly')) {
editor.showHint({
completeSingle: false,
extraKeys: {
Enter: ''
}
})
}
}
this.changes = () => {
this._cacheParams()
}
// Monitor keyboard
editor.on('keypress', this.keypress)
editor.on('changes', this.changes)
editor.setValue(this.sourceMysqlParams.srcQuerySql)
return editor
},
/**
* Processing code highlighting
*/
_handlerShellEditor () {
this._destroyShellEditor()
// shellEditor
shellEditor = codemirror('code-shell-mirror', {
mode: 'shell',
readOnly: this.isDetails
})
this.keypress = () => {
if (!shellEditor.getOption('readOnly')) {
shellEditor.showHint({
completeSingle: false,
extraKeys: {
Enter: ''
}
})
}
}
// Monitor keyboard
shellEditor.on('keypress', this.keypress)
shellEditor.setValue(this.customShell)
return shellEditor
},
/**
* return localParams
*/
_onLocalParams (a) {
this.localParams = a
},
/**
* return hadoopParams
*/
_onHadoopCustomParams (a) {
this.hadoopCustomParams = a
},
/**
* return sqoopAdvancedParams
*/
_onSqoopAdvancedParams (a) {
this.sqoopAdvancedParams = a
},
_cacheParams () {
this.$emit('on-cache-params', {
concurrency: this.concurrency,
modelType: this.modelType,
sourceType: this.sourceType,
targetType: this.targetType,
sourceParams: this._handleSourceParams(),
targetParams: this._handleTargetParams(),
localParams: this.localParams
})
},
_destroyEditor () {
if (editor) {
editor.toTextArea() // Uninstall
editor.off($('.code-sqoop-mirror'), 'keypress', this.keypress)
editor.off($('.code-sqoop-mirror'), 'changes', this.changes)
editor = null
}
},
    _destroyShellEditor () {
      if (shellEditor) {
        shellEditor.toTextArea() // Uninstall
        shellEditor.off($('.code-shell-mirror'), 'keypress', this.keypress)
        shellEditor = null
      }
    }
},
  watch: {
    // Watch the cacheParams and sync to the parent whenever they change
    cacheParams (val) {
      this._cacheParams()
    }
  },
created () {
this._destroyEditor()
let o = this.backfillItem
// Non-null objects represent backfill
if (!_.isEmpty(o)) {
this.jobType = o.params.jobType
this.isCustomTask = false
if (this.jobType === 'CUSTOM') {
this.customShell = o.params.customShell
this.isCustomTask = true
} else {
this.jobName = o.params.jobName
this.hadoopCustomParams = o.params.hadoopCustomParams
this.sqoopAdvancedParams = o.params.sqoopAdvancedParams
this.concurrency = o.params.concurrency || 1
this.modelType = o.params.modelType
this.sourceType = o.params.sourceType
this._getTargetTypeList(this.sourceType)
this.targetType = o.params.targetType
this._getSourceParams(o.params.sourceParams)
this._getTargetParams(o.params.targetParams)
this.localParams = o.params.localParams
}
}
},
mounted () {
setTimeout(() => {
this._handlerEditor()
}, 200)
setTimeout(() => {
this._handlerShellEditor()
}, 200)
setTimeout(() => {
this.srcQueryType = this.sourceMysqlParams.srcQueryType
}, 500)
},
  destroyed () {
    /**
     * Destroy both editor instances
     */
    this._destroyEditor()
    this._destroyShellEditor()
  },
computed: {
cacheParams () {
return {
concurrency: this.concurrency,
modelType: this.modelType,
sourceType: this.sourceType,
targetType: this.targetType,
localParams: this.localParams,
sourceMysqlParams: this.sourceMysqlParams,
sourceHdfsParams: this.sourceHdfsParams,
sourceHiveParams: this.sourceHiveParams,
targetHdfsParams: this.targetHdfsParams,
targetMysqlParams: this.targetMysqlParams,
targetHiveParams: this.targetHiveParams
}
}
},
components: { mListBox, mDatasource, mLocalParams, mScriptBox }
}
</script>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,172 | [Bug] [UI] Right-clicking the workflow does not respond | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
1. Select a workflow with many tasks.
2. Modify the workflow, save it, and bring it online.
3. Open the workflow, select a task, and right-click; the context menu does not appear.
This problem is intermittent; a screen recording was shown to @lenboo at the time.
4. Format the DAG with the "grid" layout (the "dagre" layout does not help), save, and the right-click menu works again.
### What you expected to happen
The right-click context menu should work at all times.
### How to reproduce
See "What happened"
### Anything else
_No response_
### Version
2.0.3
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8172 | https://github.com/apache/dolphinscheduler/pull/8346 | 91b91c95192a23c1c4e0320d328b1a9776bccfc0 | d9df8319a2bf4cb515d1598431a98cef2d80f8a2 | "2022-01-24T08:33:14Z" | java | "2022-02-11T12:24:07Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/canvas.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="dag-canvas">
<dag-taskbar @on-drag-start="onDragStart" />
<div
class="dag-container"
ref="container"
@dragenter.prevent
@dragover.prevent
@dragleave.prevent
@drop.stop.prevent="onDrop"
>
<div ref="paper" class="paper"></div>
<div ref="minimap" class="minimap"></div>
<div class="scale-slider">
<span class="scale-title">{{$t('dagScale')}}</span>
<el-slider
v-model="scale"
vertical
:max="2"
:min="0.2"
:step="0.2"
:marks="SCALE_MARKS"
@input='scaleChange'
/>
</div>
<context-menu ref="contextMenu" />
</div>
<layout-config-modal ref="layoutModal" @submit="format" />
</div>
</template>
<script>
import _ from 'lodash'
import { Graph, DataUri } from '@antv/x6'
import dagTaskbar from './taskbar.vue'
import contextMenu from './contextMenu.vue'
import layoutConfigModal, { LAYOUT_TYPE, DEFAULT_LAYOUT_CONFIG } from './layoutConfigModal.vue'
import {
NODE,
EDGE,
X6_NODE_NAME,
X6_EDGE_NAME,
NODE_STATUS_MARKUP
} from './x6-helper'
import { DagreLayout, GridLayout } from '@antv/layout'
import { tasksType, tasksState } from '../config'
import { mapActions, mapMutations, mapState } from 'vuex'
import nodeStatus from './nodeStatus'
import x6StyleMixin from './x6-style-mixin'
const SCALE_MARKS = {
0.2: '0.2',
1: '1',
2: '2'
}
export default {
name: 'dag-canvas',
data () {
return {
graph: null,
// Used to calculate the context menu location
originalScrollPosition: {
left: 0,
top: 0
},
editable: true,
dragging: {
// Distance from the mouse to the top-left corner of the dragging element
x: 0,
y: 0,
type: ''
},
// The canvas scale
scale: 1,
SCALE_MARKS
}
},
provide () {
return {
dagCanvas: this
}
},
mixins: [x6StyleMixin],
inject: ['dagChart'],
components: {
dagTaskbar,
contextMenu,
layoutConfigModal
},
computed: {
...mapState('dag', ['tasks'])
},
methods: {
...mapActions('dag', ['genTaskCodeList']),
...mapMutations('dag', ['removeTask']),
/**
* Recalculate the paper width and height
*/
paperResize () {
const w = this.$el.offsetWidth
const h = this.$el.offsetHeight
this.graph.resize(w, h)
},
/**
* Init graph
* This will be called in the dag-chart mounted event
     * @param {boolean} editable
*/
graphInit (editable) {
const self = this
this.editable = !!editable
const paper = this.$refs.paper
const minimap = this.$refs.minimap
const graph = (this.graph = new Graph({
container: paper,
selecting: {
enabled: true,
multiple: true,
rubberband: true,
rubberEdge: true,
movable: true,
showNodeSelectionBox: false
},
scaling: {
min: 0.2,
max: 2
},
mousewheel: {
enabled: true,
modifiers: ['ctrl', 'meta']
},
scroller: true,
grid: {
size: 10,
visible: true
},
snapline: true,
minimap: {
enabled: true,
container: minimap,
scalable: false,
width: 200,
height: 120
},
interacting: {
edgeLabelMovable: false,
nodeMovable: !!editable,
magnetConnectable: !!editable
},
connecting: {
        // Whether multiple edges can be created between the same start node and end node
allowMulti: false,
// Whether a point is allowed to connect to a blank position on the canvas
allowBlank: false,
// The start node and the end node are the same node
allowLoop: false,
// Whether an edge is allowed to link to another edge
allowEdge: false,
// Whether edges are allowed to link to nodes
allowNode: true,
// Whether to allow edge links to ports
allowPort: false,
// Whether all available ports or nodes are highlighted when you drag the edge
highlight: true,
createEdge () {
return graph.createEdge({ shape: X6_EDGE_NAME })
},
validateConnection (data) {
const { sourceCell, targetCell } = data
if (
sourceCell &&
targetCell &&
sourceCell.isNode() &&
targetCell.isNode()
) {
const edgeData = {
sourceId: Number(sourceCell.id),
targetId: Number(targetCell.id)
}
if (!self.edgeIsValid(edgeData)) {
return false
}
}
return true
}
},
highlighting: {
nodeAvailable: {
name: 'className',
args: {
className: 'available'
}
},
magnetAvailable: {
name: 'className',
args: {
className: 'available'
}
},
magnetAdsorbed: {
name: 'className',
args: {
className: 'adsorbed'
}
}
}
}))
this.registerX6Shape()
this.bindGraphEvent()
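      // Remember the initial scrollbar position so that later mouse events
      // (context menu, drag & drop) can be translated from graph coordinates
      // back to container coordinates after the canvas has been scrolled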
this.originalScrollPosition = graph.getScrollbarPosition()
},
/**
* Register custom shapes
*/
registerX6Shape () {
Graph.unregisterNode(X6_NODE_NAME)
Graph.unregisterEdge(X6_EDGE_NAME)
Graph.registerNode(X6_NODE_NAME, { ...NODE })
Graph.registerEdge(X6_EDGE_NAME, { ...EDGE })
},
/**
     * Bind graph events
*/
bindGraphEvent () {
this.bindStyleEvent(this.graph)
// update scale bar
this.graph.on('scale', ({ sx }) => {
this.scale = sx
})
// right click
this.graph.on('node:contextmenu', ({ x, y, cell }) => {
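        // x/y are graph-local coordinates; offset them by how far the
        // scroller has moved since initialisation so the menu opens at the
        // cursor position inside the container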
const { left, top } = this.graph.getScrollbarPosition()
const o = this.originalScrollPosition
this.$refs.contextMenu.show(x + (o.left - left), y + (o.top - top))
this.$refs.contextMenu.setCurrentTask({
name: cell.data.taskName,
type: cell.data.taskType,
code: Number(cell.id)
})
})
// node double click
this.graph.on('node:dblclick', ({ cell }) => {
this.dagChart.openFormModel(Number(cell.id), cell.data.taskType)
})
// create edge label
this.graph.on('edge:dblclick', ({ cell }) => {
const labelName = this.getEdgeLabelName(cell)
this.dagChart.$refs.edgeEditModel.show({
id: cell.id,
label: labelName
})
})
      // Make sure the edge starts from the node itself, not from a port
this.graph.on('edge:connected', ({ isNew, edge }) => {
if (isNew) {
const sourceNode = edge.getSourceNode()
edge.setSource(sourceNode)
}
})
// Add a node tool when the mouse entering
this.graph.on('node:mouseenter', ({ e, x, y, node, view }) => {
const nodeName = node.getData().taskName
node.addTools({
name: 'button',
args: {
markup: [
{
tagName: 'text',
textContent: nodeName,
attrs: {
fill: '#868686',
'font-size': 16,
                'text-anchor': 'middle'
}
}
],
x: 0,
y: 0,
offset: { x: 0, y: -10 }
}
})
})
// Remove all tools when the mouse leaving
this.graph.on('node:mouseleave', ({ node }) => {
node.removeTool('button')
})
},
/**
* @param {Edge|string} edge
*/
getEdgeLabelName (edge) {
if (typeof edge === 'string') edge = this.graph.getCellById(edge)
const labels = edge.getLabels()
const labelName = _.get(labels, ['0', 'attrs', 'label', 'text'], '')
return labelName
},
/**
* Set edge label by id
* @param {string} id
* @param {string} label
*/
setEdgeLabel (id, label) {
const edge = this.graph.getCellById(id)
edge.setLabels(label)
},
/**
     * @param {string} text
     * @param {number} n The maximum visual length
     * Each Chinese character counts as two chars
*/
truncateText (text, n) {
const exp = /[\u4E00-\u9FA5]/
let res = ''
let len = text.length
let chinese = text.match(new RegExp(exp, 'g'))
if (chinese) {
len += chinese.length
}
if (len > n) {
let i = 0
let acc = 0
while (true) {
let char = text[i]
if (exp.test(char)) {
acc += 2
} else {
acc++
}
if (acc > n) break
res += char
i++
}
res += '...'
} else {
res = text
}
return res
},
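    // Worked examples: truncateText('abcdefghij', 5) returns 'abcde...';
    // a CJK character consumes two units, so truncateText('任务节点名称', 8)
    // keeps only the first four characters before appending '...'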
/**
* Set node name by id
* @param {string|number} id
* @param {string} name
*/
setNodeName (id, name) {
id += ''
const node = this.graph.getCellById(id)
if (node) {
const truncation = this.truncateText(name, 18)
node.attr('title/text', truncation)
node.setData({ taskName: name })
}
},
setNodeForbiddenStatus (id, flag) {
id += ''
const node = this.graph.getCellById(id)
if (node) {
if (flag) {
node.attr('rect/fill', '#c4c4c4')
} else {
node.attr('rect/fill', '#ffffff')
}
}
},
/**
* Convert the graph to JSON
* @return {{cells:Cell[]}}
*/
toJSON () {
return this.graph.toJSON()
},
/**
* Generate graph with JSON
*/
fromJSON (json) {
this.graph.fromJSON(json)
},
/**
* getNodes
* @return {Node[]}
*/
// interface Node {
// id: number;
// position: {x:number;y:number};
// data: {taskType:string;taskName:string;}
// }
getNodes () {
const nodes = this.graph.getNodes()
return nodes.map((node) => {
const position = node.getPosition()
const data = node.getData()
return {
id: Number(node.id),
position: position,
data: data
}
})
},
/**
* getEdges
* @return {Edge[]} Edge is inherited from the Cell
*/
// interface Edge {
// id: string;
// label: string;
// sourceId: number;
// targetId: number;
// }
getEdges () {
const edges = this.graph.getEdges()
return edges.map((edge) => {
const labelData = edge.getLabelAt(0)
return {
id: edge.id,
label: _.get(labelData, ['attrs', 'label', 'text'], ''),
sourceId: Number(edge.getSourceCellId()),
targetId: Number(edge.getTargetCellId())
}
})
},
/**
* downloadPNG
     * @param {string} fileName
*/
downloadPNG (fileName = 'chart') {
this.graph.toPNG(
(dataUri) => {
DataUri.downloadDataUri(dataUri, `${fileName}.png`)
},
{
padding: {
top: 50,
right: 50,
bottom: 50,
left: 50
},
backgroundColor: '#f2f3f7'
}
)
},
showLayoutModal () {
const layoutModal = this.$refs.layoutModal
if (layoutModal) {
layoutModal.show()
}
},
/**
* format
* @desc Auto layout use @antv/layout
*/
format (layoutConfig) {
if (!layoutConfig) {
layoutConfig = DEFAULT_LAYOUT_CONFIG
}
this.graph.cleanSelection()
let layoutFunc = null
if (layoutConfig.type === LAYOUT_TYPE.DAGRE) {
layoutFunc = new DagreLayout({
type: LAYOUT_TYPE.DAGRE,
rankdir: 'LR',
align: 'UL',
// Calculate the node spacing based on the edge label length
ranksepFunc: (d) => {
const edges = this.graph.getOutgoingEdges(d.id)
let max = 0
if (edges && edges.length > 0) {
edges.forEach((edge) => {
const edgeView = this.graph.findViewByCell(edge)
const labelWidth = +edgeView.findAttr(
'width',
_.get(edgeView, ['labelSelectors', '0', 'body'], null)
)
max = Math.max(max, labelWidth)
})
}
return layoutConfig.ranksep + max
},
nodesep: layoutConfig.nodesep,
controlPoints: true
})
} else if (layoutConfig.type === LAYOUT_TYPE.GRID) {
layoutFunc = new GridLayout({
type: LAYOUT_TYPE.GRID,
preventOverlap: true,
preventOverlapPadding: layoutConfig.padding,
sortBy: '_index',
rows: layoutConfig.rows || undefined,
cols: layoutConfig.cols || undefined,
nodeSize: 220
})
}
const json = this.toJSON()
const nodes = json.cells
.filter((cell) => cell.shape === X6_NODE_NAME)
.map((item) => {
return {
...item,
            // sort index: the negated id orders nodes by task code
_index: -item.id
}
})
const edges = json.cells.filter((cell) => cell.shape === X6_EDGE_NAME)
const newModel = layoutFunc.layout({
nodes: nodes,
edges: edges
})
this.fromJSON(newModel)
},
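    // Rough intuition for the two layouts: dagre arranges nodes left-to-right
    // along dependency order, while grid places them row by row and ignores
    // the edges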
/**
* add a node to the graph
* @param {string|number} id
* @param {string} taskType
* @param {{x:number;y:number}} coordinate Default is { x: 100, y: 100 }
*/
addNode (id, taskType, coordinate = { x: 100, y: 100 }) {
id += ''
if (!tasksType[taskType]) {
console.warn(`taskType:${taskType} is invalid!`)
return
}
const node = this.genNodeJSON(id, taskType, '', false, coordinate)
this.graph.addNode(node)
},
/**
* generate node json
* @param {number|string} id
     * @param {string} taskType
     * @param {string} taskName
     * @param {boolean} flag Whether the task is forbidden (rendered gray)
* @param {{x:number;y:number}} coordinate Default is { x: 100, y: 100 }
*/
genNodeJSON (id, taskType, taskName, flag, coordinate = { x: 100, y: 100 }) {
id += ''
const url = require(`../images/task-icos/${taskType.toLocaleLowerCase()}.png`)
const truncation = taskName ? this.truncateText(taskName, 18) : id
const nodeJson = {
id: id,
shape: X6_NODE_NAME,
x: coordinate.x,
y: coordinate.y,
data: {
taskType: taskType,
taskName: taskName
},
attrs: {
image: {
          // Use xlink:href rather than href, otherwise the icon may be lost when exporting via downloadPNG
'xlink:href': url
},
title: {
text: truncation
}
}
}
if (flag) {
nodeJson.attrs.rect = { fill: '#c4c4c4' }
}
return nodeJson
},
/**
* generate edge json
* @param {number|string} sourceId
* @param {number|string} targetId
* @param {string} label
*/
genEdgeJSON (sourceId, targetId, label = '') {
sourceId += ''
targetId += ''
return {
shape: X6_EDGE_NAME,
source: {
cell: sourceId
},
target: {
cell: targetId
},
labels: label ? [label] : undefined
}
},
/**
* remove a node
* @param {string|number} id NodeId
*/
removeNode (id) {
id += ''
this.graph.removeNode(id)
this.removeTask(+id)
},
/**
* remove an edge
* @param {string} id EdgeId
*/
removeEdge (id) {
this.graph.removeEdge(id)
},
/**
* remove multiple cells
* @param {Cell[]} cells
*/
removeCells (cells) {
this.graph.removeCells(cells)
cells.forEach((cell) => {
if (cell.isNode()) {
this.removeTask(+cell.id)
}
})
},
/**
* Verify whether edge is valid
     * A CONDITIONS task may have at most two outgoing edges
*/
edgeIsValid (edge) {
const { sourceId } = edge
const sourceTask = this.tasks.find((task) => task.code === sourceId)
if (sourceTask.taskType === 'CONDITIONS') {
const edges = this.getEdges()
return edges.filter((e) => e.sourceId === sourceTask.code).length <= 2
}
return true
},
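    // e.g. a CONDITIONS node is limited to its success and failure branches;
    // outgoing edges beyond that limit are rejected here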
/**
* Gets the current selections
* @return {Cell[]}
*/
getSelections () {
return this.graph.getSelectedCells()
},
/**
* Lock scroller
*/
lockScroller () {
this.graph.lockScroller()
},
/**
* Unlock scroller
*/
unlockScroller () {
this.graph.unlockScroller()
},
/**
* set node status icon
* @param {number} code
* @param {string} state
*/
setNodeStatus ({ code, state, taskInstance }) {
code += ''
const stateProps = tasksState[state]
const node = this.graph.getCellById(code)
if (node) {
// Destroy the previous dom
node.removeMarkup()
node.setMarkup(NODE.markup.concat(NODE_STATUS_MARKUP))
const nodeView = this.graph.findViewByCell(node)
const el = nodeView.find('div')[0]
nodeStatus({
stateProps,
taskInstance
}).$mount(el)
}
},
/**
* Drag && Drop Event
*/
onDragStart (e, taskType) {
if (!this.editable) {
e.preventDefault()
return
}
this.dragging = {
x: e.offsetX,
y: e.offsetY,
type: taskType.name
}
},
onDrop (e) {
const { type } = this.dragging
const { x, y } = this.calcGraphCoordinate(e.clientX, e.clientY)
this.genTaskCodeList({
genNum: 1
})
.then((res) => {
const [code] = res
this.addNode(code, type, { x, y })
this.dagChart.openFormModel(code, type)
})
.catch((err) => {
console.error(err)
})
},
calcGraphCoordinate (mClientX, mClientY) {
// Distance from the mouse to the top-left corner of the container;
const { left: cX, top: cY } =
this.$refs.container.getBoundingClientRect()
const mouseX = mClientX - cX
const mouseY = mClientY - cY
// The distance that paper has been scrolled
const { left: sLeft, top: sTop } = this.graph.getScrollbarPosition()
const { left: oLeft, top: oTop } = this.originalScrollPosition
const scrollX = sLeft - oLeft
const scrollY = sTop - oTop
// Distance from the mouse to the top-left corner of the dragging element;
const { x: eX, y: eY } = this.dragging
return {
x: mouseX + scrollX - eX,
y: mouseY + scrollY - eY
}
},
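    // Worked example (hypothetical numbers): with mouseX = 400, the paper
    // scrolled 150px to the right (scrollX = 150) and the cursor 20px from the
    // dragged element's left edge (eX = 20), the node lands at x = 400 + 150 - 20 = 530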
/**
* Get prev nodes by code
* @param {number} code
* node1 -> node2 -> node3
* getPrevNodes(node2.code) => [node1]
*/
getPrevNodes (code) {
const nodes = this.getNodes()
const edges = this.getEdges()
const nodesMap = {}
nodes.forEach((node) => {
nodesMap[node.id] = node
})
return edges
.filter((edge) => edge.targetId === code)
.map((edge) => nodesMap[edge.sourceId])
},
/**
* set prev nodes
* @param {number} code
* @param {number[]} preNodeCodes
     * @param {boolean} override If set to true, setPreNodes will delete all edges that end at the node and rebuild them
*/
setPreNodes (code, preNodeCodes, override) {
const edges = this.getEdges()
const currPreCodes = []
edges.forEach((edge) => {
if (edge.targetId === code) {
if (override) {
this.removeEdge(edge.id)
} else {
currPreCodes.push(edge.sourceId)
}
}
})
preNodeCodes.forEach((preCode) => {
if (currPreCodes.includes(preCode) || preCode === code) return
const edge = this.genEdgeJSON(preCode, code)
this.graph.addEdge(edge)
})
},
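    // e.g. setPreNodes(3, [1, 2], true) removes every edge that ends at
    // node 3, then rebuilds the edges 1 -> 3 and 2 -> 3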
/**
* Get post nodes by code
* @param {number} code
* node1 -> node2 -> node3
* getPostNodes(node2.code) => [node3]
*/
getPostNodes (code) {
const nodes = this.getNodes()
const edges = this.getEdges()
const nodesMap = {}
nodes.forEach((node) => {
nodesMap[node.id] = node
})
return edges
.filter((edge) => edge.sourceId === code)
.map((edge) => nodesMap[edge.targetId])
},
/**
* set post nodes
* @param {number} code
* @param {number[]} postNodeCodes
     * @param {boolean} override If set to true, setPostNodes will delete all edges that start from the node and rebuild them
*/
setPostNodes (code, postNodeCodes, override) {
const edges = this.getEdges()
const currPostCodes = []
edges.forEach((edge) => {
if (edge.sourceId === code) {
if (override) {
this.removeEdge(edge.id)
} else {
currPostCodes.push(edge.targetId)
}
}
})
postNodeCodes.forEach((postCode) => {
if (currPostCodes.includes(postCode) || postCode === code) return
const edge = this.genEdgeJSON(code, postCode)
this.graph.addEdge(edge)
})
},
/**
* Navigate to cell
* @param {string} taskName
*/
navigateTo (taskName) {
const nodes = this.getNodes()
nodes.forEach((node) => {
if (node.data.taskName === taskName) {
const id = node.id
const cell = this.graph.getCellById(id)
this.graph.scrollToCell(cell, { animation: { duration: 600 } })
this.graph.cleanSelection()
this.graph.select(cell)
}
})
},
/**
* Canvas scale
*/
scaleChange (val) {
this.graph.zoomTo(val)
}
}
}
</script>
<style lang="scss" scoped>
@import "./canvas";
</style>
<style lang="scss">
@import "./x6-style";
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,172 | [Bug] [UI] Right-clicking the workflow does not respond | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
1. Select a workflow with many tasks.
2. Modify the workflow, save it, and bring it online.
3. Open the workflow, select a task, and right-click; the context menu does not appear.
This problem is intermittent; a screen recording was shown to @lenboo at the time.
4. Format the DAG with the "grid" layout (the "dagre" layout does not help), save, and the right-click menu works again.
### What you expected to happen
The right-click context menu should work at all times.
### How to reproduce
See "What happened"
### Anything else
_No response_
### Version
2.0.3
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8172 | https://github.com/apache/dolphinscheduler/pull/8346 | 91b91c95192a23c1c4e0320d328b1a9776bccfc0 | d9df8319a2bf4cb515d1598431a98cef2d80f8a2 | "2022-01-24T08:33:14Z" | java | "2022-02-11T12:24:07Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/contextMenu.scss | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
.dag-context-menu{
position: absolute;
left: 0;
top: 0;
width: 100px;
background-color: #ffffff;
box-shadow: 0 2px 10px rgba(0,0,0,0.12);
.menu-item{
padding: 5px 10px;
border-bottom: solid 1px #f2f3f7;
cursor: pointer;
color: rgb(89, 89, 89);
font-size: 12px;
&:hover:not(.disabled){
color: #262626;
background-color: #f5f5f5;
}
&.disabled{
cursor: not-allowed;
color: rgba(89, 89, 89, .4);
}
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,187 | [Bug] [resource] Whether the re-upload function can be retained | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
In the Resource Center, the re-upload function is gone.
To re-upload a file, you must first delete the existing file and then upload it again.
Could this work like version 1.3.X, which provided a re-upload function that directly overwrote the original file?
![image](https://user-images.githubusercontent.com/18095523/150906906-511fda70-fe47-4f1e-981f-4dd0827c135f.png)
### What you expected to happen
A re-upload function that directly overwrites the existing file.
### How to reproduce
See "What happened"
### Anything else
_No response_
### Version
2.0.3
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8187 | https://github.com/apache/dolphinscheduler/pull/8359 | b0dbc33730bcc3a8cdb70c3daab469ce8d9769aa | 3f6caef08fc79e8253ff69372f36dbef49d312d2 | "2022-01-25T03:42:49Z" | java | "2022-02-13T03:47:10Z" | dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/subdirectory/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model">
<div class="table-box">
<el-table :data="list" size="mini" style="width: 100%">
<el-table-column type="index" :label="$t('#')" width="50"></el-table-column>
<el-table-column :label="$t('Name')">
<template slot-scope="scope">
<el-popover trigger="hover" placement="top">
<p>{{ scope.row.alias }}</p>
<div slot="reference" class="name-wrapper">
<a href="javascript:" class="links" @click="_go(scope.row)">{{ scope.row.alias }}</a>
</div>
</el-popover>
</template>
</el-table-column>
<el-table-column :label="$t('Whether directory')" width="100">
<template slot-scope="scope">
{{scope.row.directory? $t('Yes') : $t('No')}}
</template>
</el-table-column>
<el-table-column prop="fileName" :label="$t('File Name')"></el-table-column>
<el-table-column :label="$t('Description')" width="200">
<template slot-scope="scope">
<span>{{scope.row.description | filterNull}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Size')">
<template slot-scope="scope">
{{_rtSize(scope.row.size)}}
</template>
</el-table-column>
<el-table-column :label="$t('Update Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.updateTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Operation')" width="150">
<template slot-scope="scope">
<el-tooltip :content="$t('Edit')" placement="top" :enterable="false">
<span><el-button type="primary" size="mini" icon="el-icon-edit-outline" @click="_edit(scope.row)" :disabled="_rtDisb(scope.row)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('Rename')" placement="top" :enterable="false">
<span><el-button type="primary" size="mini" icon="el-icon-edit" @click="_rename(scope.row,scope.$index)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('Download')" placement="top" :enterable="false">
<span><el-button type="primary" size="mini" icon="el-icon-download" @click="_downloadFile(scope.row)" :disabled="scope.row.directory? true: false" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('Delete')" placement="top" :enterable="false">
<el-popconfirm
:confirmButtonText="$t('Confirm')"
:cancelButtonText="$t('Cancel')"
icon="el-icon-info"
iconColor="red"
:title="$t('Delete?')"
@onConfirm="_delete(scope.row,scope.row.id)"
>
<el-button type="danger" size="mini" icon="el-icon-delete" circle slot="reference"></el-button>
</el-popconfirm>
</el-tooltip>
</template>
</el-table-column>
</el-table>
</div>
<el-dialog
:visible.sync="renameDialog"
width="auto">
<m-rename :item="item" @onUpDate="onUpDate" @close="close"></m-rename>
</el-dialog>
</div>
</template>
<script>
import _ from 'lodash'
import mRename from './rename'
import { mapActions } from 'vuex'
import { filtTypeArr } from '../../_source/common'
import { bytesToSize } from '@/module/util/util'
import { downloadFile } from '@/module/download'
import localStore from '@/module/util/localStorage'
export default {
name: 'file-manage-list',
data () {
return {
list: [],
renameDialog: false
}
},
props: {
fileResourcesList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
...mapActions('resource', ['deleteResource']),
_edit (item) {
localStore.setItem('file', `${item.alias}|${item.size}`)
this.$router.push({ path: `/resource/file/edit/${item.id}` })
},
_go (item) {
localStore.setItem('file', `${item.alias}|${item.size}`)
if (item.directory) {
localStore.setItem('pid', `${item.id}`)
localStore.setItem('currentDir', `${item.fullName}`)
this.$router.push({ path: `/resource/file/subdirectory/${item.id}` })
} else {
this.$router.push({ path: `/resource/file/list/${item.id}` })
}
},
_downloadFile (item) {
downloadFile(`resources/${item.id}/download`)
},
_rtSize (val) {
return bytesToSize(parseInt(val))
},
_delete (item, i) {
this.deleteResource({
id: item.id
}).then(res => {
this.$emit('on-update')
this.$message.success(res.msg)
}).catch(e => {
this.$message.error(e.msg || '')
})
},
_rename (item, i) {
this.item = item
this.index = i
this.renameDialog = true
},
onUpDate (item) {
this.$set(this.list, this.index, item)
this.renameDialog = false
},
close () {
this.renameDialog = false
},
    _rtDisb ({ alias, size }) {
      // Online editing is only enabled for whitelisted file types smaller than 1 MB
      let i = alias.lastIndexOf('.')
      let ext = _.trimStart(alias.substring(i, alias.length), '.')
      let editable = _.includes(filtTypeArr, ext) && size < 1000000
      return !editable
    }
},
watch: {
fileResourcesList (a) {
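      // emptied and reassigned asynchronously, presumably to force the table to re-render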
this.list = []
setTimeout(() => {
this.list = a
})
}
// Listening for routing changes
// '$route': {
// deep: false,
// handler () {
// this.$emit('on-update',this.$route.params.id)
// }
// }
},
beforeRouteUpdate (to, from, next) {
    next() // next() must always be called here
},
created () {
},
mounted () {
this.list = this.fileResourcesList
},
components: { mRename }
}
</script>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,187 | [Bug] [resource] Whether the re-upload function can be retained | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
In the Resource Center, the re-upload function is gone.
To re-upload a file, you must first delete the existing file and then upload it again.
Could this work like version 1.3.X, which provided a re-upload function that directly overwrote the original file?
![image](https://user-images.githubusercontent.com/18095523/150906906-511fda70-fe47-4f1e-981f-4dd0827c135f.png)
### What you expected to happen
A re-upload function that directly overwrites the existing file.
### How to reproduce
See "What happened"
### Anything else
_No response_
### Version
2.0.3
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8187 | https://github.com/apache/dolphinscheduler/pull/8359 | b0dbc33730bcc3a8cdb70c3daab469ce8d9769aa | 3f6caef08fc79e8253ff69372f36dbef49d312d2 | "2022-01-25T03:42:49Z" | java | "2022-02-13T03:47:10Z" | dolphinscheduler-ui/src/js/module/components/fileUpdate/fileReUpload.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<m-popup
ref="popup"
:ok-text="$t('Upload')"
:nameText="$t('ReUpload File')"
@ok="_ok"
:disabled="progress === 0 ? false : true">
<template slot="content">
<form name="files" enctype="multipart/form-data" method="post">
<div class="file-update-model"
@drop.prevent="_onDrop"
@dragover.prevent="dragOver = true"
@dragleave.prevent="dragOver = false"
id="file-update-model">
<div class="tooltip-info">
<em class="ans el-icon-warning"></em>
<span>{{$t('Drag the file into the current upload window')}}</span>
</div>
<div class="update-popup" v-if="dragOver">
<div class="icon-box">
<em class="ans el-icon-upload"></em>
</div>
<p class="p1">
<span>{{$t('Drag area upload')}}</span>
</p>
</div>
<m-list-box-f>
<template slot="name"><strong>*</strong>{{$t('File Name')}}</template>
<template slot="content">
<el-input
type="input"
size="small"
v-model="name"
:disabled="progress !== 0"
:placeholder="$t('Please enter name')">
</el-input>
</template>
</m-list-box-f>
<m-list-box-f>
<template slot="name">{{$t('Description')}}</template>
<template slot="content">
<el-input
type="textarea"
size="small"
v-model="description"
:disabled="progress !== 0"
:placeholder="$t('Please enter description')">
</el-input>
</template>
</m-list-box-f>
<m-list-box-f>
<template slot="name"><strong>*</strong>{{$t('Upload Files')}}</template>
<template slot="content">
<div class="file-update-box">
<template v-if="progress === 0">
<input ref="file" name="file" type="file" class="file-update" @change="_onChange">
<el-button size="mini"> {{$t('Upload')}} </el-button>
</template>
<div class="progress-box" v-if="progress !== 0">
<m-progress-bar :value="progress" text-placement="left-right"></m-progress-bar>
</div>
</div>
</template>
</m-list-box-f>
</div>
</form>
</template>
</m-popup>
</template>
<script>
import io from '@/module/io'
import i18n from '@/module/i18n'
import store from '@/conf/home/store'
import mPopup from '@/module/components/popup/popup'
import mListBoxF from '@/module/components/listBoxF/listBoxF'
import mProgressBar from '@/module/components/progressBar/progressBar'
export default {
name: 'file-update',
data () {
return {
store,
// name
name: '',
// description
description: '',
// progress
progress: 0,
// file
file: null,
currentDir: '/',
// Whether to drag upload
dragOver: false
}
},
watch: {
},
props: {
type: String,
fileName: String,
desc: String,
id: Number
},
methods: {
/**
* submit
*/
    _ok () {
      this.$refs.popup.spinnerLoading = true
      if (!this._validation()) {
        this.$refs.popup.spinnerLoading = false
        return
      }
      if (this.fileName === this.name) {
        // The name is unchanged, so there is no need to verify it again
        this._uploadIfSizeAllowed()
      } else {
        this.store.dispatch('resource/resourceVerifyName', {
          fullName: '/' + this.name,
          type: this.type
        }).then(res => {
          this._uploadIfSizeAllowed()
        }).catch(e => {
          this.$message.error(e.msg || '')
          this.$refs.popup.spinnerLoading = false
        })
      }
    },
    /**
     * Upload the file if it is smaller than 1GB, otherwise warn and abort
     */
    _uploadIfSizeAllowed () {
      const isLt1024M = this.file.size / 1024 / 1024 < 1024
      if (isLt1024M) {
        this._formDataUpdate().then(res => {
          setTimeout(() => {
            this.$refs.popup.spinnerLoading = false
          }, 800)
        }).catch(e => {
          this.$refs.popup.spinnerLoading = false
        })
      } else {
        this.$message.warning(`${i18n.$t('Upload File Size')}`)
        this.$refs.popup.spinnerLoading = false
      }
    },
/**
* validation
*/
_validation () {
if (!this.name) {
this.$message.warning(`${i18n.$t('Please enter file name')}`)
return false
}
if (!this.file) {
this.$message.warning(`${i18n.$t('Please select the file to upload')}`)
return false
}
return true
},
/**
* update file
*/
_formDataUpdate () {
return new Promise((resolve, reject) => {
let self = this
let formData = new FormData()
formData.append('file', this.file)
formData.append('name', this.name)
formData.append('description', this.description)
formData.append('id', this.id)
formData.append('type', this.type)
io.post('resources/update', res => {
this.$message.success(res.msg)
resolve()
self.$emit('onUpdate')
this.reset()
}, e => {
reject(e)
self.$emit('close')
this.$message.error(e.msg || '')
this.reset()
}, {
data: formData,
emulateJSON: false,
onUploadProgress (progressEvent) {
// Size has been uploaded
let loaded = progressEvent.loaded
// Total attachment size
let total = progressEvent.total
self.progress = Math.floor(100 * loaded / total)
self.$emit('onProgress', self.progress)
}
})
})
},
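    // Conceptually the request above is a multipart POST (sketch; field names
    // are those appended to the FormData built above):
    //   POST resources/update
    //   file=<binary>, name=<file name>, description=<text>,
    //   id=<resource id>, type=<resource type>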
/**
     * Collapse the dialog to the top-right corner and keep uploading in the background
*/
_ckArchive () {
$('.update-file-modal').hide()
this.$emit('onArchive')
},
reset () {
this.name = ''
this.description = ''
this.progress = 0
this.file = null
this.currentDir = '/'
this.dragOver = false
},
/**
* Drag and drop upload
*/
_onDrop (e) {
let file = e.dataTransfer.files[0]
this.file = file
this.name = file.name
this.dragOver = false
},
_onChange () {
let file = this.$refs.file.files[0]
this.file = file
this.name = file.name
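      // Reset the native input so that choosing the same file again still
      // fires the change event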
this.$refs.file.value = null
}
},
mounted () {
this.name = this.fileName
this.description = this.desc
},
components: { mPopup, mListBoxF, mProgressBar }
}
</script>
<style lang="scss" rel="stylesheet/scss">
.file-update-model {
.tooltip-info {
position: absolute;
left: 20px;
bottom: 26px;
span {
font-size: 12px;
color: #666;
vertical-align: middle;
}
.fa,.ans {
color: #0097e0;
font-size: 14px;
vertical-align: middle;
}
}
.hide-archive {
position: absolute;
right: 22px;
top: 17px;
.fa,.ans{
font-size: 16px;
color: #333;
font-weight: normal;
cursor: pointer;
&:hover {
color: #0097e0;
}
}
}
.file-update-box {
padding-top: 4px;
position: relative;
.file-update {
width: 70px;
height: 40px;
position: absolute;
left: 0;
top: 0;
cursor: pointer;
filter: alpha(opacity=0);
-moz-opacity: 0;
opacity: 0;
}
&:hover {
.v-btn-dashed {
background-color: transparent;
border-color: #47c3ff;
color: #47c3ff;
cursor: pointer;
}
}
.progress-box {
width: 200px;
position: absolute;
left: 70px;
top: 14px;
}
}
.update-popup {
width: calc(100% - 20px);
height: calc(100% - 20px);
background: rgba(255,253,239,.7);
position: absolute;
top: 10px;
left: 10px;
border-radius: 3px;
z-index: 1;
border: .18rem dashed #cccccc;
.icon-box {
text-align: center;
margin-top: 96px;
.fa,.ans {
font-size: 50px;
color: #2d8cf0;
}
}
.p1 {
text-align: center;
font-size: 16px;
color: #333;
padding-top: 8px;
}
}
}
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,187 | [Bug] [resource] Whether the re-upload function can be retained | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
In the Resource Center, the re-upload function is gone.
To re-upload a file, you must first delete the existing file and then upload it again.
Could this work like version 1.3.X, which provided a re-upload function that directly overwrote the original file?
![image](https://user-images.githubusercontent.com/18095523/150906906-511fda70-fe47-4f1e-981f-4dd0827c135f.png)
### What you expected to happen
A re-upload function that directly overwrites the existing file.
### How to reproduce
See "What happened"
### Anything else
_No response_
### Version
2.0.3
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8187 | https://github.com/apache/dolphinscheduler/pull/8359 | b0dbc33730bcc3a8cdb70c3daab469ce8d9769aa | 3f6caef08fc79e8253ff69372f36dbef49d312d2 | "2022-01-25T03:42:49Z" | java | "2022-02-13T03:47:10Z" | dolphinscheduler-ui/src/js/module/components/nav/nav.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="nav-model">
<router-link :to="{ path: '/home'}" tag="div" class="logo-box">
<a href="javascript:"></a>
</router-link>
<div class="nav-box">
<div class="clearfix list">
<div class="nav-links">
<router-link :to="{ path: '/home'}" tag="a" active-class="active">
<span><em class="ansfont ri-home-4-line"></em>{{$t('Home')}}</span><strong></strong>
</router-link>
</div>
</div>
<div class="clearfix list">
<div class="nav-links">
<router-link :to="{ path: '/projects'}" tag="a" active-class="active" id="tabProject">
<span><em class="ansiconfont el-icon-tickets"></em>{{$t('Project Manage')}}</span><strong></strong>
</router-link>
</div>
</div>
<div class="clearfix list">
<div class="nav-links">
<router-link :to="{ path: '/dataquality'}" tag="a" active-class="active">
<span><em class="ansiconfont el-icon-document-checked"></em>{{$t('DataQuality')}}</span><strong></strong>
</router-link>
</div>
</div>
<div class="clearfix list">
<div class="nav-links">
<router-link :to="{ path: '/resource'}" tag="a" active-class="active" id="tabResource">
<span><em class="ansiconfont el-icon-folder"></em>{{$t('Resources manage')}}</span><strong></strong>
</router-link>
</div>
</div>
<div class="clearfix list">
<div class="nav-links">
<router-link :to="{ path: '/datasource'}" tag="a" active-class="active" id="tabDataSource">
<span><em class="ansfont ri-database-2-line"></em>{{$t('Datasource manage')}}</span><strong></strong>
</router-link>
</div>
</div>
<div class="clearfix list">
<div class="nav-links">
<router-link :to="{ path: '/monitor'}" tag="a" active-class="active">
<span><em class="ansiconfont el-icon-monitor"></em>{{$t('Monitor')}}</span><strong></strong>
</router-link>
</div>
</div>
<div class="clearfix list" >
<div class="nav-links">
<router-link :to="{ path: '/security'}" tag="a" active-class="active" v-ps="['ADMIN_USER']" id="tabSecurity">
<span><em class="ansfont ri-shield-check-line"></em>{{$t('Security')}}</span><strong></strong>
</router-link>
</div>
</div>
</div>
<div class="right">
<span class="lang">
<el-dropdown @command="_toggleLanguage">
<span class="el-dropdown-link">
{{activeLocale.name}}<em class="el-icon-arrow-down"></em>
</span>
<el-dropdown-menu slot="dropdown">
<el-dropdown-item :command="item.code" v-for="(item,$index) in localeList" :key="$index">{{item.name}}</el-dropdown-item>
</el-dropdown-menu>
</el-dropdown>
</span>
<el-dropdown @command="_toggleUser">
<span class="el-dropdown-link">
<em class="el-icon-user-solid"></em>{{userInfo.userName}}<em class="el-icon-arrow-down"></em>
</span>
<el-dropdown-menu slot="dropdown">
<el-dropdown-item command="user">
<em class="el-icon-user"></em>
<span>{{$t('User Information')}}</span>
</el-dropdown-item>
<el-dropdown-item command="logout">
<em class="el-icon-switch-button"></em>
<span>{{$t('Logout')}}</span>
</el-dropdown-item>
</el-dropdown-menu>
</el-dropdown>
</div>
<div class="file-update-model" @click="_toggleArchive" v-if="isUpdate">
<div class="icon-cloud">
<em class="ans el-icon-upload"></em>
</div>
<div class="progress-box">
<m-progress-bar :value="progress" text-placement="bottom"></m-progress-bar>
</div>
</div>
<div class="adaptive-m-nav">
<div class="m-nav-box ">
<a href="javascript:" @click="mIsNav = !mIsNav"><em class="ansfont ri-database-2-line"></em></a>
</div>
<div class="m-title-box">
<div class="logo-m"></div>
</div>
<div class="m-user-box">
<a href="javascript:" @click="_goAccount"><em class="el-icon-user"></em></a>
</div>
<transition name="slide-fade">
<div class="m-nav-list" v-if="mIsNav">
<ul @click="mIsNav = false">
<router-link :to="{ path: '/home'}" tag="li" active-class="active">
<em class="ansfont ri-home-4-line"></em>
<span>{{$t('Home')}}</span>
</router-link>
<router-link :to="{ path: '/projects'}" tag="li" active-class="active">
<em class="el-icon-tickets"></em>
<span>{{$t('Project Manage')}}</span>
</router-link>
<router-link :to="{ path: '/dataquality'}" tag="li" active-class="active">
<em class="el-icon-document-checked"></em>
<span>{{$t('DataQuality')}}</span>
</router-link>
<router-link :to="{ path: '/resource'}" tag="li" active-class="active">
<em class="el-icon-folder"></em>
<span>{{$t('Resources manage')}}</span>
</router-link>
<router-link :to="{ path: '/datasource'}" tag="li" active-class="active">
<em class="ansfont ri-database-2-line"></em>
<span>{{$t('Datasource manage')}}</span>
</router-link>
<router-link :to="{ path: '/security'}" tag="li" active-class="active" v-ps="['ADMIN_USER']">
<em class="ansfont ri-shield-check-line"></em>
<span>{{$t('Security')}}</span>
</router-link>
</ul>
</div>
</transition>
</div>
<el-dialog
:visible.sync="definitionUpdateDialog"
      :append-to-body="true"
width="auto">
<m-definition-update :type="type" @onProgressDefinition="onProgressDefinition" @onUpdateDefinition="onUpdateDefinition" @onArchiveDefinition="onArchiveDefinition" @closeDefinition="closeDefinition"></m-definition-update>
</el-dialog>
<el-dialog
id="fileUpdateDialog"
:visible.sync="fileUpdateDialog"
      :append-to-body="true"
width="auto">
<m-file-update :type="type" @onProgressFileUpdate="onProgressFileUpdate" @onUpdateFileUpdate="onUpdateFileUpdate" @onArchiveDefinition="onArchiveFileUpdate" @closeFileUpdate="closeFileUpdate"></m-file-update>
</el-dialog>
<el-dialog
:visible.sync="fileChildUpdateDialog"
      :append-to-body="true"
width="auto">
<m-file-child-update :type="type" :id="id" @onProgressFileChildUpdate="onProgressFileChildUpdate" @onUpdateFileChildUpdate="onUpdateFileChildUpdate" @onArchiveFileChildUpdate="onArchiveFileChildUpdate" @closeFileChildUpdate="closeFileChildUpdate"></m-file-child-update>
</el-dialog>
<el-dialog
:visible.sync="resourceChildUpdateDialog"
      :append-to-body="true"
width="auto">
<m-resource-child-update :type="type" :id="id" @onProgressResourceChildUpdate="onProgressResourceChildUpdate" @onUpdateResourceChildUpdate="onUpdateResourceChildUpdate" @onArchiveFileChildUpdate="onArchiveResourceChildUpdate" @closeResourceChildUpdate="closeResourceChildUpdate"></m-resource-child-update>
</el-dialog>
</div>
</template>
<script>
import _ from 'lodash'
import cookies from 'js-cookie'
import { mapState, mapActions } from 'vuex'
import { findComponentDownward } from '@/module/util/'
import mFileUpdate from '@/module/components/fileUpdate/fileUpdate'
import mFileChildUpdate from '@/module/components/fileUpdate/fileChildUpdate'
import mResourceChildUpdate from '@/module/components/fileUpdate/resourceChildUpdate'
import mDefinitionUpdate from '@/module/components/fileUpdate/definitionUpdate'
import mProgressBar from '@/module/components/progressBar/progressBar'
import { findLocale, localeList } from '@/module/i18n/config'
export default {
name: 'roof-nav',
data () {
return {
// Whether to drag
isDrag: false,
// Upload progress
progress: 0,
// Whether to upload
isUpdate: false,
// Whether to log in
isLogin: false,
// Mobile compatible navigation
mIsNav: false,
      // Language list with the locale payload stripped (only code and name are kept)
localeList: _.map(_.cloneDeep(localeList()), v => _.omit(v, ['locale'])),
// Selected language
activeLocale: '',
      // Documentation link (depends on the active locale)
docLink: '',
type: '',
definitionUpdateDialog: false,
fileUpdateDialog: false,
fileChildUpdateDialog: false,
id: null,
resourceChildUpdateDialog: false
}
},
methods: {
...mapActions('user', ['signOut']),
/**
* User Info
*/
_goAccount () {
this.isLogin = false
this.$router.push({ name: 'account' })
},
/**
* _toggle User
*/
_toggleUser (command) {
if (command === 'user') {
this._goAccount()
} else {
this._signOut()
}
},
/**
     * Open the corresponding upload dialog (process definition import or file upload)
*/
_fileUpdate (type) {
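      // a non-zero progress means an upload is already running; just re-show the progress popup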
if (this.progress) {
this._toggleArchive()
return
}
this.type = type
if (this.type === 'DEFINITION') {
this.definitionUpdateDialog = true
} else {
this.fileUpdateDialog = true
}
},
onProgressDefinition (val) {
this.progress = val
},
onUpdateDefinition () {
let self = this
findComponentDownward(self.$root, 'definition-list-index')._updateList()
this.isUpdate = false
this.progress = 0
this.definitionUpdateDialog = false
},
onArchiveDefinition () {
this.isUpdate = true
},
closeDefinition () {
this.progress = 0
this.definitionUpdateDialog = false
},
onProgressFileUpdate (val) {
this.progress = val
},
onUpdateFileUpdate () {
let self = this
findComponentDownward(self.$root, `resource-list-index-${this.type}`)._updateList()
this.isUpdate = false
this.progress = 0
this.fileUpdateDialog = false
},
onArchiveFileUpdate () {
this.isUpdate = true
},
closeFileUpdate () {
this.progress = 0
this.fileUpdateDialog = false
},
_fileChildUpdate (type, data) {
if (this.progress) {
this._toggleArchive()
return
}
this.type = type
this.id = data
this.fileChildUpdateDialog = true
},
onProgressFileChildUpdate (val) {
this.progress = val
},
onUpdateFileChildUpdate () {
let self = this
findComponentDownward(self.$root, `resource-list-index-${this.type}`)._updateList()
this.isUpdate = false
this.progress = 0
this.fileChildUpdateDialog = false
},
onArchiveFileChildUpdate () {
this.isUpdate = true
},
closeFileChildUpdate () {
this.progress = 0
this.fileChildUpdateDialog = false
},
_resourceChildUpdate (type, data) {
if (this.progress) {
this._toggleArchive()
return
}
this.type = type
this.id = data
this.resourceChildUpdateDialog = true
},
onProgressResourceChildUpdate (val) {
this.progress = val
},
onUpdateResourceChildUpdate () {
let self = this
findComponentDownward(self.$root, `resource-list-index-${this.type}`)._updateList()
this.isUpdate = false
this.progress = 0
this.resourceChildUpdateDialog = false
},
onArchiveResourceChildUpdate () {
this.isUpdate = true
},
closeResourceChildUpdate () {
this.progress = 0
this.resourceChildUpdateDialog = false
},
/**
* Upload popup layer display
*/
_toggleArchive () {
$('.update-file-modal').show()
},
/**
* sign out
*/
_signOut () {
this.signOut()
},
/**
* Language switching
*/
_toggleLanguage (language) {
cookies.set('language', language, { path: '/' })
setTimeout(() => {
window.location.reload()
}, 100)
}
},
created () {
let language = cookies.get('language')
this.activeLocale = language ? findLocale(language) : '中文'
this.docLink = process.env.NODE_ENV === 'true' ? 'docs' : `/view/docs/${this.activeLocale.code}/_book` // eslint-disable-line
},
computed: {
...mapState('user', ['userInfo'])
},
components: { mFileUpdate, mProgressBar, mDefinitionUpdate, mFileChildUpdate, mResourceChildUpdate }
}
</script>
<style lang="scss" rel="stylesheet/scss">
.nav-model {
height: 60px;
background: #2D303A;
box-shadow: 0 3px 5px rgba(0, 0, 0, 0.3);
position: relative;
.m-title-box {
text-align: center;
.logo-m {
width: 36px;
height: 36px;
margin: 0 auto;
position: relative;
top: 12px;
}
}
.el-dropdown {
color: #fff;
font-size: 14px;
vertical-align: middle;
line-height: 60px;
margin-right: 25px;
}
.logo-box {
position: absolute;
left: 10px;
top: 8px;
cursor: pointer;
>a {
width: 180px;
height: 46px;
display: block;
background: url("./logo.svg") no-repeat;
}
}
.nav-box {
height: 60px;
line-height: 60px;
position: absolute;
left: 220px;
top: 0;
.list {
width: 106px;
float: left;
position: relative;
cursor: pointer;
margin-right: 14px;
.nav-links {
height: 60px;
a {
position: relative;
text-decoration: none;
font-size: 15px;
color: #333;
display: inline-block;
float: left;
text-align: center;
span {
display: block;
width: 106px;
color: #fff;
.ansiconfont {
vertical-align: -2px;
font-size: 22px;
margin-right: 4px;
}
.ansfont {
vertical-align: -6px;
font-size: 24px;
margin-right: 4px;
}
}
&:hover {
span {
color: #2d8cf0;
}
}
&.active {
span {
color: #2D8BF0;
i {
color: #2d8cf0;
}
}
b {
height: 2px;
background: #2d8cf0;
display: block;
margin-top: -2px;
}
}
}
}
}
}
.right {
position: absolute;
right: 0;
top: 0;
.ans-poptip {
min-width: 120px;
}
>.lang {
.ans-poptip {
min-width: 80px;
}
}
>.docs {
padding-right: 20px;
a {
color: #fff;
font-size: 14px;
vertical-align: middle;
&:hover {
color: #2d8bf0;
}
}
}
.current-project {
height: 60px;
line-height: 56px;
display: inline-block;
margin-right: 16px;
cursor: pointer;
i {
font-size: 18px;
vertical-align: middle;
&:nth-child(1) {
font-size: 20px;
color:#2d8cf0;
}
}
span {
color: #333;
vertical-align: middle;
font-size: 14px;
}
&:hover {
span {
color: #2d8cf0;
}
}
}
.login-model {
display: inline-block;
margin-right: 20px;
cursor: pointer;
margin-top: 16px;
em {
font-size: 18px;
vertical-align: middle;
color: #fff;
}
span {
color: #fff;
vertical-align: middle;
font-size: 14px;
}
&:hover {
>i,>span {
color: #2d8cf0;
}
}
}
.lrns-list {
margin: -6px 0;
a {
display: block;
height: 30px;
line-height: 30px;
font-size: 14px;
&:hover {
i,span {
color: #2d8cf0;
}
}
}
}
}
.file-update-model {
cursor: pointer;
.progress-box {
width: 240px;
}
.icon-cloud {
position: absolute;
left: -34px;
top: 2px;
> i {
font-size: 22px;
color: #2d8cf0;
}
}
}
}
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,887 | [Bug] [Parameter Context] The preparameter OUT is null | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
Two OUT parameters are configured on the first node: one is assigned a constant value, and the other gets its value from the shell output. When they are passed to the next node, the constant parameter is null.
![image](https://user-images.githubusercontent.com/23023197/148558082-d585afef-fa4c-43f7-a746-67c3fc90fd3b.png)
![image](https://user-images.githubusercontent.com/23023197/148558172-974f8199-704b-4b8d-ba6a-a647f2f1d167.png)
![image](https://user-images.githubusercontent.com/23023197/148558219-e7c08f89-f3ee-49ff-8cb9-0a6edfa78ebb.png)
### What you expected to happen
When `AbstractParameters` assigns values to the OUT parameters, it overwrites the original constant value with null.
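To illustrate the expected behaviour, here is a minimal, self-contained Java sketch (this is not the actual patch; only the `taskResult` lookup mirrors `AbstractParameters#dealOutParam` below, everything else is made up for the demo). The idea is to keep the configured constant whenever the shell output does not provide a value for the property:

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class DealOutParamSketch {
    public static void main(String[] args) {
        // values actually produced by the shell task
        Map<String, String> taskResult = new HashMap<>();
        taskResult.put("shellVar", "123");

        // declared OUT properties: prop -> configured constant (null if none)
        Map<String, String> outParams = new HashMap<>();
        outParams.put("shellVar", null);
        outParams.put("constVar", "abc");

        List<String> varPool = new ArrayList<>();
        outParams.forEach((prop, constant) -> {
            // only overwrite when the task output actually provides a value
            String value = taskResult.getOrDefault(prop, constant);
            varPool.add(prop + "=" + value);
        });
        // prints shellVar=123 and constVar=abc (order depends on the map)
        System.out.println(varPool);
    }
}
```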
### How to reproduce
![image](https://user-images.githubusercontent.com/23023197/148558549-ca1d24d9-5b4e-4a9e-a487-73da75be45f7.png)
![image](https://user-images.githubusercontent.com/23023197/148558600-e89bc08a-c983-447f-81ab-6e69d0a72121.png)
![image](https://user-images.githubusercontent.com/23023197/148558645-71e60c1f-c3bc-4fa7-95d1-e2c99a5f8bf9.png)
![image](https://user-images.githubusercontent.com/23023197/148558700-f5aa7d25-325b-4565-8fb4-b5ad24769617.png)
### Anything else
It happens every time.
### Version
2.0.1
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7887 | https://github.com/apache/dolphinscheduler/pull/8315 | b9406d356cd20fdb96d2fe0eeac327c346eaf1c5 | 703d7904159b6e839209d498af47cb4d112ae22a | "2022-01-07T14:33:24Z" | java | "2022-02-14T06:25:23Z" | dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/AbstractParameters.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.spi.task;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import org.apache.commons.collections4.CollectionUtils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
/**
* job params related class
*/
public abstract class AbstractParameters implements IParameters {
/**
* local parameters
*/
protected List<Property> localParams;
/**
* var pool
*/
public List<Property> varPool;
/**
* get local parameters list
* @return Property list
*/
public List<Property> getLocalParams() {
return localParams;
}
public void setLocalParams(List<Property> localParams) {
this.localParams = localParams;
}
/**
* get local parameters map
* @return parameters map
*/
public Map<String, Property> getLocalParametersMap() {
if (localParams != null) {
Map<String, Property> localParametersMaps = new LinkedHashMap<>();
for (Property property : localParams) {
localParametersMaps.put(property.getProp(),property);
}
return localParametersMaps;
}
return null;
}
/**
* get varPool map
*
* @return parameters map
*/
public Map<String, Property> getVarPoolMap() {
if (varPool != null) {
Map<String, Property> varPoolMap = new LinkedHashMap<>();
for (Property property : varPool) {
varPoolMap.put(property.getProp(), property);
}
return varPoolMap;
}
return null;
}
public List<Property> getVarPool() {
return varPool;
}
public void setVarPool(String varPool) {
if (StringUtils.isEmpty(varPool)) {
this.varPool = new ArrayList<>();
} else {
this.varPool = JSONUtils.toList(varPool, Property.class);
}
}
public void dealOutParam(String result) {
if (CollectionUtils.isEmpty(localParams)) {
return;
}
List<Property> outProperty = getOutProperty(localParams);
if (CollectionUtils.isEmpty(outProperty)) {
return;
}
if (StringUtils.isEmpty(result)) {
varPool.addAll(outProperty);
return;
}
Map<String, String> taskResult = getMapByString(result);
if (taskResult == null || taskResult.size() == 0) {
return;
}
for (Property info : outProperty) {
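            // taskResult.get() returns null when the shell output does not contain
            // this prop, so a constant OUT value is overwritten with null here (see issue #7887)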
info.setValue(taskResult.get(info.getProp()));
varPool.add(info);
}
}
public List<Property> getOutProperty(List<Property> params) {
if (CollectionUtils.isEmpty(params)) {
return new ArrayList<>();
}
List<Property> result = new ArrayList<>();
for (Property info : params) {
if (info.getDirect() == Direct.OUT) {
result.add(info);
}
}
return result;
}
public List<Map<String, String>> getListMapByString(String json) {
List<Map<String, String>> allParams = new ArrayList<>();
ArrayNode paramsByJson = JSONUtils.parseArray(json);
for (JsonNode jsonNode : paramsByJson) {
Map<String, String> param = JSONUtils.toMap(jsonNode.toString());
allParams.add(param);
}
return allParams;
}
/**
     * shell's result format is key=value$VarPool$key=value$VarPool$,
     * e.g. "a=1$VarPool$b=2$VarPool$" is parsed into {a=1, b=2}
     *
     * @param result the raw task output in the format above
     * @return the parsed key/value map
*/
public static Map<String, String> getMapByString(String result) {
String[] formatResult = result.split("\\$VarPool\\$");
Map<String, String> format = new HashMap<>();
for (String info : formatResult) {
if (StringUtils.isNotEmpty(info) && info.contains("=")) {
                // split on the first '=' only, so values containing '=' are kept intact
                String[] keyValue = info.split("=", 2);
format.put(keyValue[0], keyValue[1]);
}
}
return format;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,370 | [Bug] [UI] There isn't the re-uploading file button when the file is in the root directory. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
There is no re-upload file button when the file is under the root directory.
![image](https://user-images.githubusercontent.com/4928204/153817029-39836c79-99f1-42b8-a378-3d2b7e85d60c.png)
When the file is under a sub-directory, there is a re-upload file button.
![image](https://user-images.githubusercontent.com/4928204/153817070-3b65c77d-1fee-4e2a-a381-7b353935e198.png)
### What you expected to happen
I expect the re-upload file button to be available under the root directory as well.
### How to reproduce
You can browse the file management page and upload a file under the root directory; the re-upload file button will not be displayed there.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8370 | https://github.com/apache/dolphinscheduler/pull/8375 | 49a1270ef5efd9e47d476752ed2e2e92a6ef99f1 | b2108486ba28dcc04436204347401475e245724f | "2022-02-14T07:20:42Z" | java | "2022-02-14T12:14:24Z" | dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model">
<div class="table-box">
<el-table :data="list" size="mini" style="width: 100%" row-class-name="items">
<el-table-column type="index" :label="$t('#')" width="50"></el-table-column>
<el-table-column :label="$t('Name')">
<template slot-scope="scope">
<el-popover trigger="hover" placement="top">
<p>{{ scope.row.alias }}</p>
<div slot="reference" class="name-wrapper">
<a href="javascript:" class="links" @click="_go(scope.row)">{{ scope.row.alias }}</a>
</div>
</el-popover>
</template>
</el-table-column>
<el-table-column prop="userName" :label="$t('Resource userName')"></el-table-column>
<el-table-column :label="$t('Whether directory')" width="100">
<template slot-scope="scope">
{{scope.row.directory? $t('Yes') : $t('No')}}
</template>
</el-table-column>
<el-table-column prop="fileName" :label="$t('File Name')"></el-table-column>
<el-table-column :label="$t('Description')" width="200">
<template slot-scope="scope">
<span>{{scope.row.description | filterNull}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Size')">
<template slot-scope="scope">
{{_rtSize(scope.row.size)}}
</template>
</el-table-column>
<el-table-column :label="$t('Update Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.updateTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Operation')" width="150">
<template slot-scope="scope">
<el-tooltip :content="$t('Edit')" placement="top">
<span><el-button id="btnEdit" type="primary" size="mini" icon="el-icon-edit-outline" @click="_edit(scope.row)" :disabled="_rtDisb(scope.row)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('Rename')" placement="top">
<span><el-button id="btnRename" type="primary" size="mini" icon="el-icon-edit" @click="_rename(scope.row,scope.$index)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('Download')" placement="top">
<span><el-button id="btnDownload" type="primary" size="mini" icon="el-icon-download" @click="_downloadFile(scope.row)" :disabled="scope.row.directory? true: false" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('Delete')" placement="top">
<el-popconfirm
:confirmButtonText="$t('Confirm')"
:cancelButtonText="$t('Cancel')"
icon="el-icon-info"
iconColor="red"
:title="$t('Delete?')"
@onConfirm="_delete(scope.row,scope.row.id)"
>
<el-button id="delete" type="danger" size="mini" icon="el-icon-delete" circle slot="reference"></el-button>
</el-popconfirm>
</el-tooltip>
</template>
</el-table-column>
</el-table>
</div>
<el-dialog
:visible.sync="renameDialog"
width="auto">
<m-rename :item="item" @onUpDate="onUpDate" @close="close"></m-rename>
</el-dialog>
</div>
</template>
<script>
import _ from 'lodash'
import mRename from './rename'
import { mapActions } from 'vuex'
import { filtTypeArr } from '../../_source/common'
import { bytesToSize } from '@/module/util/util'
import { downloadFile } from '@/module/download'
import localStore from '@/module/util/localStorage'
export default {
name: 'file-manage-list',
data () {
return {
list: [],
renameDialog: false,
item: {},
index: null
}
},
props: {
fileResourcesList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
...mapActions('resource', ['deleteResource']),
_edit (item) {
localStore.setItem('file', `${item.alias}|${item.size}`)
this.$router.push({ path: `/resource/file/edit/${item.id}` })
},
_go (item) {
localStore.setItem('file', `${item.alias}|${item.size}`)
if (item.directory) {
localStore.setItem('currentDir', `${item.fullName}`)
this.$router.push({ path: `/resource/file/subdirectory/${item.id}` })
} else {
this.$router.push({ path: `/resource/file/list/${item.id}` })
}
},
_downloadFile (item) {
downloadFile(`resources/${item.id}/download`)
},
_rtSize (val) {
return bytesToSize(parseInt(val))
},
_delete (item, i) {
this.deleteResource({
id: item.id
}).then(res => {
this.$emit('on-update')
this.$message.success(res.msg)
}).catch(e => {
this.$message.error(e.msg || '')
})
},
_rename (item, i) {
this.item = item
this.index = i
this.renameDialog = true
},
onUpDate (item) {
this.$set(this.list, this.index, item)
this.renameDialog = false
},
close () {
this.renameDialog = false
},
    _rtDisb ({ alias, size }) {
      // editing is enabled only for whitelisted file types smaller than 1MB
      const extension = _.trimStart(alias.substring(alias.lastIndexOf('.')), '.')
      const editable = _.includes(filtTypeArr, extension) && size < 1000000
      return !editable
    }
},
watch: {
fileResourcesList (a) {
this.list = []
setTimeout(() => {
this.list = a
})
}
},
created () {
},
mounted () {
this.list = this.fileResourcesList
},
components: { mRename }
}
</script>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,370 | [Bug] [UI] There isn't the re-uploading file button when the file is in the root directory. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
There is no re-upload file button when the file is under the root directory.
![image](https://user-images.githubusercontent.com/4928204/153817029-39836c79-99f1-42b8-a378-3d2b7e85d60c.png)
When the file is under a sub-directory, there is a re-upload file button.
![image](https://user-images.githubusercontent.com/4928204/153817070-3b65c77d-1fee-4e2a-a381-7b353935e198.png)
### What you expected to happen
I expect the re-upload file button to be available under the root directory as well.
### How to reproduce
You can browse the file management page and upload a file under the root directory; the re-upload file button will not be displayed there.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8370 | https://github.com/apache/dolphinscheduler/pull/8375 | 49a1270ef5efd9e47d476752ed2e2e92a6ef99f1 | b2108486ba28dcc04436204347401475e245724f | "2022-02-14T07:20:42Z" | java | "2022-02-14T12:14:24Z" | dolphinscheduler-ui/src/js/module/components/fileUpdate/fileReUpload.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<m-popup
ref="popup"
:ok-text="$t('Upload')"
:nameText="$t('ReUpload File')"
@ok="_ok"
@close="_close"
:disabled="progress === 0 ? false : true">
<template slot="content">
<form name="files" enctype="multipart/form-data" method="post">
<div class="file-update-model"
@drop.prevent="_onDrop"
@dragover.prevent="dragOver = true"
@dragleave.prevent="dragOver = false"
id="file-update-model">
<div class="tooltip-info">
<em class="ans el-icon-warning"></em>
<span>{{$t('Drag the file into the current upload window')}}</span>
</div>
<div class="update-popup" v-if="dragOver">
<div class="icon-box">
<em class="ans el-icon-upload"></em>
</div>
<p class="p1">
<span>{{$t('Drag area upload')}}</span>
</p>
</div>
<m-list-box-f>
<template slot="name"><strong>*</strong>{{$t('File Name')}}</template>
<template slot="content">
<el-input
type="input"
size="small"
v-model="name"
:disabled="true"
:placeholder="$t('Please enter name')">
</el-input>
</template>
</m-list-box-f>
<m-list-box-f>
<template slot="name">{{$t('Description')}}</template>
<template slot="content">
<el-input
type="textarea"
size="small"
v-model="description"
:disabled="progress !== 0"
:placeholder="$t('Please enter description')">
</el-input>
</template>
</m-list-box-f>
<m-list-box-f>
<template slot="name"><strong>*</strong>{{$t('Upload Files')}}</template>
<template slot="content">
<div class="file-update-box">
<template v-if="progress === 0">
<input ref="file" name="file" type="file" class="file-update" @change="_onChange">
<el-button size="mini"> {{$t('Upload')}} </el-button>
</template>
<div class="progress-box" v-if="progress !== 0">
<m-progress-bar :value="progress" text-placement="left-right"></m-progress-bar>
</div>
</div>
</template>
</m-list-box-f>
</div>
</form>
</template>
</m-popup>
</template>
<script>
import io from '@/module/io'
import i18n from '@/module/i18n'
import store from '@/conf/home/store'
import mPopup from '@/module/components/popup/popup'
import mListBoxF from '@/module/components/listBoxF/listBoxF'
import mProgressBar from '@/module/components/progressBar/progressBar'
export default {
name: 'file-upload',
data () {
return {
store,
// name
name: '',
// description
description: '',
// progress
progress: 0,
// file
file: null,
currentDir: '/',
id: null,
// Whether to drag upload
dragOver: false
}
},
watch: {
},
props: {
originalFileData: Object
},
methods: {
/**
* submit
*/
_ok () {
this.$refs.popup.spinnerLoading = true
if (this._validation()) {
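        // if the file name is unchanged, overwrite directly; otherwise verify the new name first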
if (this.originalFileData.fileName === this.name) {
const isLt1024M = this.file.size / 1024 / 1024 < 1024
if (isLt1024M) {
this._formDataUpdate().then(res => {
setTimeout(() => {
this.$refs.popup.spinnerLoading = false
}, 800)
}).catch(e => {
this.$refs.popup.spinnerLoading = false
})
} else {
this.$message.warning(`${i18n.$t('Upload File Size')}`)
this.$refs.popup.spinnerLoading = false
}
} else {
const params = { fullName: this.currentDir + this.name, type: 'FILE' }
this.store.dispatch('resource/resourceVerifyName', params).then(res => {
const isLt1024M = this.file.size / 1024 / 1024 < 1024
if (isLt1024M) {
this._formDataUpdate().then(res => {
setTimeout(() => {
this.$refs.popup.spinnerLoading = false
}, 800)
}).catch(e => {
this.$refs.popup.spinnerLoading = false
})
} else {
this.$message.warning(`${i18n.$t('Upload File Size')}`)
this.$refs.popup.spinnerLoading = false
}
}).catch(e => {
this.$message.error(e.msg || '')
this.$refs.popup.spinnerLoading = false
})
}
} else {
this.$refs.popup.spinnerLoading = false
}
},
_close () {
this.$emit('closeFileUpload')
},
/**
* validation
*/
_validation () {
if (!this.name) {
this.$message.warning(`${i18n.$t('Please enter file name')}`)
return false
}
if (!this.file) {
this.$message.warning(`${i18n.$t('Please select the file to upload')}`)
return false
}
return true
},
/**
* update file
*/
_formDataUpdate () {
return new Promise((resolve, reject) => {
let self = this
let formData = new FormData()
formData.append('file', this.file)
formData.append('name', this.name)
formData.append('description', this.description)
formData.append('type', 'FILE')
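        // PUT to resources/:id updates the existing resource in place (re-upload)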
io.put('resources/' + this.id, res => {
this.$message.success(res.msg)
resolve()
self.$emit('onUploadFile')
this.reset()
}, e => {
reject(e)
self.$emit('closeFileUpload')
this.$message.error(e.msg || '')
this.reset()
}, {
data: formData,
emulateJSON: false,
onUploadProgress (progressEvent) {
// Size has been uploaded
let loaded = progressEvent.loaded
// Total attachment size
let total = progressEvent.total
self.progress = Math.floor(100 * loaded / total)
self.$emit('onProgress', self.progress)
}
})
})
},
/**
* Archive to the top right corner Continue uploading
*/
_ckArchive () {
$('.update-file-modal').hide()
this.$emit('onArchive')
},
reset () {
this.name = ''
this.description = ''
this.progress = 0
this.file = null
this.currentDir = '/'
this.dragOver = false
},
/**
* Drag and drop upload
*/
_onDrop (e) {
let file = e.dataTransfer.files[0]
this.file = file
this.name = file.name
this.dragOver = false
},
_onChange () {
let file = this.$refs.file.files[0]
this.file = file
this.name = file.name
this.$refs.file.value = null
}
},
mounted () {
this.reset()
this.id = this.originalFileData.id
this.name = this.originalFileData.fileName
if (this.originalFileData.desc) {
this.description = this.originalFileData.desc
}
this.currentDir = this.originalFileData.fullName.substring(0, this.originalFileData.fullName.length - this.originalFileData.fileName.length)
},
components: { mPopup, mListBoxF, mProgressBar }
}
</script>
<style lang="scss" rel="stylesheet/scss">
.file-update-model {
.tooltip-info {
position: absolute;
left: 20px;
bottom: 26px;
span {
font-size: 12px;
color: #666;
vertical-align: middle;
}
.fa,.ans {
color: #0097e0;
font-size: 14px;
vertical-align: middle;
}
}
.hide-archive {
position: absolute;
right: 22px;
top: 17px;
.fa,.ans{
font-size: 16px;
color: #333;
font-weight: normal;
cursor: pointer;
&:hover {
color: #0097e0;
}
}
}
.file-update-box {
padding-top: 4px;
position: relative;
.file-update {
width: 70px;
height: 40px;
position: absolute;
left: 0;
top: 0;
cursor: pointer;
filter: alpha(opacity=0);
-moz-opacity: 0;
opacity: 0;
}
&:hover {
.v-btn-dashed {
background-color: transparent;
border-color: #47c3ff;
color: #47c3ff;
cursor: pointer;
}
}
.progress-box {
width: 200px;
position: absolute;
left: 70px;
top: 14px;
}
}
.update-popup {
width: calc(100% - 20px);
height: calc(100% - 20px);
background: rgba(255,253,239,.7);
position: absolute;
top: 10px;
left: 10px;
border-radius: 3px;
z-index: 1;
border: .18rem dashed #cccccc;
.icon-box {
text-align: center;
margin-top: 96px;
.fa,.ans {
font-size: 50px;
color: #2d8cf0;
}
}
.p1 {
text-align: center;
font-size: 16px;
color: #333;
padding-top: 8px;
}
}
}
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,398 | [Bug] [SQL] There is a syntax error in the file of dolphinscheduler_postgresql.sql. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
When I used the file 'dolphinscheduler_postgresql.sql' to initialize the database, the process failed: the DDL script that creates the table 't_ds_k8s_namespace' contains a syntax error. With PostgreSQL as the backend database, the MySQL-style 'int(11)' type is not supported (the column should be declared as plain 'int').
![image](https://user-images.githubusercontent.com/4928204/154208066-b72fcc61-c800-4001-bcc8-6b2b4a9dc4a5.png)
### What you expected to happen
I expect to be able to initialize the database with the file 'dolphinscheduler_postgresql.sql'.
### How to reproduce
Initialize the database with the file 'dolphinscheduler_postgresql.sql' and you will see the error.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8398 | https://github.com/apache/dolphinscheduler/pull/8399 | ca086c037e2cf53bceea64c4cfa600b04e8667e6 | f18f28cc4a69e7e0d6bf76724ea721af07f87304 | "2022-02-16T06:31:22Z" | java | "2022-02-16T07:10:19Z" | dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
DROP TABLE IF EXISTS QRTZ_FIRED_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_PAUSED_TRIGGER_GRPS;
DROP TABLE IF EXISTS QRTZ_SCHEDULER_STATE;
DROP TABLE IF EXISTS QRTZ_LOCKS;
DROP TABLE IF EXISTS QRTZ_SIMPLE_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_SIMPROP_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_CRON_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_BLOB_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_JOB_DETAILS;
DROP TABLE IF EXISTS QRTZ_CALENDARS;
CREATE TABLE QRTZ_JOB_DETAILS (
SCHED_NAME character varying(120) NOT NULL,
JOB_NAME character varying(200) NOT NULL,
JOB_GROUP character varying(200) NOT NULL,
DESCRIPTION character varying(250) NULL,
JOB_CLASS_NAME character varying(250) NOT NULL,
IS_DURABLE boolean NOT NULL,
IS_NONCONCURRENT boolean NOT NULL,
IS_UPDATE_DATA boolean NOT NULL,
REQUESTS_RECOVERY boolean NOT NULL,
JOB_DATA bytea NULL
);
alter table QRTZ_JOB_DETAILS add primary key(SCHED_NAME,JOB_NAME,JOB_GROUP);
CREATE TABLE QRTZ_TRIGGERS (
SCHED_NAME character varying(120) NOT NULL,
TRIGGER_NAME character varying(200) NOT NULL,
TRIGGER_GROUP character varying(200) NOT NULL,
JOB_NAME character varying(200) NOT NULL,
JOB_GROUP character varying(200) NOT NULL,
DESCRIPTION character varying(250) NULL,
NEXT_FIRE_TIME BIGINT NULL,
PREV_FIRE_TIME BIGINT NULL,
PRIORITY INTEGER NULL,
TRIGGER_STATE character varying(16) NOT NULL,
TRIGGER_TYPE character varying(8) NOT NULL,
START_TIME BIGINT NOT NULL,
END_TIME BIGINT NULL,
CALENDAR_NAME character varying(200) NULL,
MISFIRE_INSTR SMALLINT NULL,
JOB_DATA bytea NULL
) ;
alter table QRTZ_TRIGGERS add primary key(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP);
CREATE TABLE QRTZ_SIMPLE_TRIGGERS (
SCHED_NAME character varying(120) NOT NULL,
TRIGGER_NAME character varying(200) NOT NULL,
TRIGGER_GROUP character varying(200) NOT NULL,
REPEAT_COUNT BIGINT NOT NULL,
REPEAT_INTERVAL BIGINT NOT NULL,
TIMES_TRIGGERED BIGINT NOT NULL
) ;
alter table QRTZ_SIMPLE_TRIGGERS add primary key(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP);
CREATE TABLE QRTZ_CRON_TRIGGERS (
SCHED_NAME character varying(120) NOT NULL,
TRIGGER_NAME character varying(200) NOT NULL,
TRIGGER_GROUP character varying(200) NOT NULL,
CRON_EXPRESSION character varying(120) NOT NULL,
TIME_ZONE_ID character varying(80)
) ;
alter table QRTZ_CRON_TRIGGERS add primary key(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP);
CREATE TABLE QRTZ_SIMPROP_TRIGGERS (
SCHED_NAME character varying(120) NOT NULL,
TRIGGER_NAME character varying(200) NOT NULL,
TRIGGER_GROUP character varying(200) NOT NULL,
STR_PROP_1 character varying(512) NULL,
STR_PROP_2 character varying(512) NULL,
STR_PROP_3 character varying(512) NULL,
INT_PROP_1 INT NULL,
INT_PROP_2 INT NULL,
LONG_PROP_1 BIGINT NULL,
LONG_PROP_2 BIGINT NULL,
DEC_PROP_1 NUMERIC(13,4) NULL,
DEC_PROP_2 NUMERIC(13,4) NULL,
BOOL_PROP_1 boolean NULL,
BOOL_PROP_2 boolean NULL
) ;
alter table QRTZ_SIMPROP_TRIGGERS add primary key(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP);
CREATE TABLE QRTZ_BLOB_TRIGGERS (
SCHED_NAME character varying(120) NOT NULL,
TRIGGER_NAME character varying(200) NOT NULL,
TRIGGER_GROUP character varying(200) NOT NULL,
BLOB_DATA bytea NULL
) ;
alter table QRTZ_BLOB_TRIGGERS add primary key(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP);
CREATE TABLE QRTZ_CALENDARS (
SCHED_NAME character varying(120) NOT NULL,
CALENDAR_NAME character varying(200) NOT NULL,
CALENDAR bytea NOT NULL
) ;
alter table QRTZ_CALENDARS add primary key(SCHED_NAME,CALENDAR_NAME);
CREATE TABLE QRTZ_PAUSED_TRIGGER_GRPS (
SCHED_NAME character varying(120) NOT NULL,
TRIGGER_GROUP character varying(200) NOT NULL
) ;
alter table QRTZ_PAUSED_TRIGGER_GRPS add primary key(SCHED_NAME,TRIGGER_GROUP);
CREATE TABLE QRTZ_FIRED_TRIGGERS (
SCHED_NAME character varying(120) NOT NULL,
ENTRY_ID character varying(200) NOT NULL,
TRIGGER_NAME character varying(200) NOT NULL,
TRIGGER_GROUP character varying(200) NOT NULL,
INSTANCE_NAME character varying(200) NOT NULL,
FIRED_TIME BIGINT NOT NULL,
SCHED_TIME BIGINT NOT NULL,
PRIORITY INTEGER NOT NULL,
STATE character varying(16) NOT NULL,
JOB_NAME character varying(200) NULL,
JOB_GROUP character varying(200) NULL,
IS_NONCONCURRENT boolean NULL,
REQUESTS_RECOVERY boolean NULL
) ;
alter table QRTZ_FIRED_TRIGGERS add primary key(SCHED_NAME,ENTRY_ID);
CREATE TABLE QRTZ_SCHEDULER_STATE (
SCHED_NAME character varying(120) NOT NULL,
INSTANCE_NAME character varying(200) NOT NULL,
LAST_CHECKIN_TIME BIGINT NOT NULL,
CHECKIN_INTERVAL BIGINT NOT NULL
) ;
alter table QRTZ_SCHEDULER_STATE add primary key(SCHED_NAME,INSTANCE_NAME);
CREATE TABLE QRTZ_LOCKS (
SCHED_NAME character varying(120) NOT NULL,
LOCK_NAME character varying(40) NOT NULL
) ;
alter table QRTZ_LOCKS add primary key(SCHED_NAME,LOCK_NAME);
CREATE INDEX IDX_QRTZ_J_REQ_RECOVERY ON QRTZ_JOB_DETAILS(SCHED_NAME,REQUESTS_RECOVERY);
CREATE INDEX IDX_QRTZ_J_GRP ON QRTZ_JOB_DETAILS(SCHED_NAME,JOB_GROUP);
CREATE INDEX IDX_QRTZ_T_J ON QRTZ_TRIGGERS(SCHED_NAME,JOB_NAME,JOB_GROUP);
CREATE INDEX IDX_QRTZ_T_JG ON QRTZ_TRIGGERS(SCHED_NAME,JOB_GROUP);
CREATE INDEX IDX_QRTZ_T_C ON QRTZ_TRIGGERS(SCHED_NAME,CALENDAR_NAME);
CREATE INDEX IDX_QRTZ_T_G ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_GROUP);
CREATE INDEX IDX_QRTZ_T_STATE ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_STATE);
CREATE INDEX IDX_QRTZ_T_N_STATE ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP,TRIGGER_STATE);
CREATE INDEX IDX_QRTZ_T_N_G_STATE ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_GROUP,TRIGGER_STATE);
CREATE INDEX IDX_QRTZ_T_NEXT_FIRE_TIME ON QRTZ_TRIGGERS(SCHED_NAME,NEXT_FIRE_TIME);
CREATE INDEX IDX_QRTZ_T_NFT_ST ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_STATE,NEXT_FIRE_TIME);
CREATE INDEX IDX_QRTZ_T_NFT_MISFIRE ON QRTZ_TRIGGERS(SCHED_NAME,MISFIRE_INSTR,NEXT_FIRE_TIME);
CREATE INDEX IDX_QRTZ_T_NFT_ST_MISFIRE ON QRTZ_TRIGGERS(SCHED_NAME,MISFIRE_INSTR,NEXT_FIRE_TIME,TRIGGER_STATE);
CREATE INDEX IDX_QRTZ_T_NFT_ST_MISFIRE_GRP ON QRTZ_TRIGGERS(SCHED_NAME,MISFIRE_INSTR,NEXT_FIRE_TIME,TRIGGER_GROUP,TRIGGER_STATE);
CREATE INDEX IDX_QRTZ_FT_TRIG_INST_NAME ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,INSTANCE_NAME);
CREATE INDEX IDX_QRTZ_FT_INST_JOB_REQ_RCVRY ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,INSTANCE_NAME,REQUESTS_RECOVERY);
CREATE INDEX IDX_QRTZ_FT_J_G ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,JOB_NAME,JOB_GROUP);
CREATE INDEX IDX_QRTZ_FT_JG ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,JOB_GROUP);
CREATE INDEX IDX_QRTZ_FT_T_G ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP);
CREATE INDEX IDX_QRTZ_FT_TG ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,TRIGGER_GROUP);
--
-- Table structure for table t_ds_access_token
--
DROP TABLE IF EXISTS t_ds_access_token;
CREATE TABLE t_ds_access_token (
id int NOT NULL ,
user_id int DEFAULT NULL ,
token varchar(64) DEFAULT NULL ,
expire_time timestamp DEFAULT NULL ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
--
-- Table structure for table t_ds_alert
--
DROP TABLE IF EXISTS t_ds_alert;
CREATE TABLE t_ds_alert (
id int NOT NULL ,
title varchar(64) DEFAULT NULL ,
content text ,
alert_status int DEFAULT '0' ,
log text ,
alertgroup_id int DEFAULT NULL ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
--
-- Table structure for table t_ds_alertgroup
--
DROP TABLE IF EXISTS t_ds_alertgroup;
CREATE TABLE t_ds_alertgroup(
id int NOT NULL,
alert_instance_ids varchar (255) DEFAULT NULL,
create_user_id int4 DEFAULT NULL,
group_name varchar(255) DEFAULT NULL,
description varchar(255) DEFAULT NULL,
create_time timestamp DEFAULT NULL,
update_time timestamp DEFAULT NULL,
PRIMARY KEY (id),
CONSTRAINT t_ds_alertgroup_name_un UNIQUE (group_name)
) ;
--
-- Table structure for table t_ds_command
--
DROP TABLE IF EXISTS t_ds_command;
CREATE TABLE t_ds_command (
id int NOT NULL ,
command_type int DEFAULT NULL ,
process_definition_code bigint NOT NULL ,
command_param text ,
task_depend_type int DEFAULT NULL ,
failure_strategy int DEFAULT '0' ,
warning_type int DEFAULT '0' ,
warning_group_id int DEFAULT NULL ,
schedule_time timestamp DEFAULT NULL ,
start_time timestamp DEFAULT NULL ,
executor_id int DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
process_instance_priority int DEFAULT NULL ,
worker_group varchar(64),
environment_code bigint DEFAULT '-1',
dry_run int DEFAULT '0' ,
process_instance_id int DEFAULT 0,
process_definition_version int DEFAULT 0,
PRIMARY KEY (id)
) ;
create index priority_id_index on t_ds_command (process_instance_priority,id);
--
-- Table structure for table t_ds_datasource
--
DROP TABLE IF EXISTS t_ds_datasource;
CREATE TABLE t_ds_datasource (
id int NOT NULL ,
name varchar(64) NOT NULL ,
note varchar(255) DEFAULT NULL ,
type int NOT NULL ,
user_id int NOT NULL ,
connection_params text NOT NULL ,
create_time timestamp NOT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id),
CONSTRAINT t_ds_datasource_name_un UNIQUE (name, type)
) ;
--
-- Table structure for table t_ds_error_command
--
DROP TABLE IF EXISTS t_ds_error_command;
CREATE TABLE t_ds_error_command (
id int NOT NULL ,
command_type int DEFAULT NULL ,
process_definition_code bigint NOT NULL ,
command_param text ,
task_depend_type int DEFAULT NULL ,
failure_strategy int DEFAULT '0' ,
warning_type int DEFAULT '0' ,
warning_group_id int DEFAULT NULL ,
schedule_time timestamp DEFAULT NULL ,
start_time timestamp DEFAULT NULL ,
executor_id int DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
process_instance_priority int DEFAULT NULL ,
worker_group varchar(64),
environment_code bigint DEFAULT '-1',
dry_run int DEFAULT '0' ,
message text ,
process_instance_id int DEFAULT 0,
process_definition_version int DEFAULT 0,
PRIMARY KEY (id)
);
--
-- Table structure for table t_ds_process_definition
--
DROP TABLE IF EXISTS t_ds_process_definition;
CREATE TABLE t_ds_process_definition (
id int NOT NULL ,
code bigint NOT NULL,
name varchar(255) DEFAULT NULL ,
version int NOT NULL ,
description text ,
project_code bigint DEFAULT NULL ,
release_state int DEFAULT NULL ,
user_id int DEFAULT NULL ,
global_params text ,
locations text ,
warning_group_id int DEFAULT NULL ,
flag int DEFAULT NULL ,
timeout int DEFAULT '0' ,
tenant_id int DEFAULT '-1' ,
execution_type int DEFAULT '0',
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id) ,
CONSTRAINT process_definition_unique UNIQUE (name, project_code)
) ;
create index process_definition_index on t_ds_process_definition (code,id);
--
-- Table structure for table t_ds_process_definition_log
--
DROP TABLE IF EXISTS t_ds_process_definition_log;
CREATE TABLE t_ds_process_definition_log (
id int NOT NULL ,
code bigint NOT NULL,
name varchar(255) DEFAULT NULL ,
version int NOT NULL ,
description text ,
project_code bigint DEFAULT NULL ,
release_state int DEFAULT NULL ,
user_id int DEFAULT NULL ,
global_params text ,
locations text ,
warning_group_id int DEFAULT NULL ,
flag int DEFAULT NULL ,
timeout int DEFAULT '0' ,
tenant_id int DEFAULT '-1' ,
execution_type int DEFAULT '0',
operator int DEFAULT NULL ,
operate_time timestamp DEFAULT NULL ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
--
-- Table structure for table t_ds_task_definition
--
DROP TABLE IF EXISTS t_ds_task_definition;
CREATE TABLE t_ds_task_definition (
id int NOT NULL ,
code bigint NOT NULL,
name varchar(255) DEFAULT NULL ,
version int NOT NULL ,
description text ,
project_code bigint DEFAULT NULL ,
user_id int DEFAULT NULL ,
task_type varchar(50) DEFAULT NULL ,
task_params text ,
flag int DEFAULT NULL ,
task_priority int DEFAULT NULL ,
worker_group varchar(255) DEFAULT NULL ,
environment_code bigint DEFAULT '-1',
fail_retry_times int DEFAULT NULL ,
fail_retry_interval int DEFAULT NULL ,
timeout_flag int DEFAULT NULL ,
timeout_notify_strategy int DEFAULT NULL ,
timeout int DEFAULT '0' ,
delay_time int DEFAULT '0' ,
task_group_id int DEFAULT NULL,
task_group_priority int DEFAULT '0',
resource_ids text ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
create index task_definition_index on t_ds_task_definition (project_code,id);
--
-- Table structure for table t_ds_task_definition_log
--
DROP TABLE IF EXISTS t_ds_task_definition_log;
CREATE TABLE t_ds_task_definition_log (
id int NOT NULL ,
code bigint NOT NULL,
name varchar(255) DEFAULT NULL ,
version int NOT NULL ,
description text ,
project_code bigint DEFAULT NULL ,
user_id int DEFAULT NULL ,
task_type varchar(50) DEFAULT NULL ,
task_params text ,
flag int DEFAULT NULL ,
task_priority int DEFAULT NULL ,
worker_group varchar(255) DEFAULT NULL ,
environment_code bigint DEFAULT '-1',
fail_retry_times int DEFAULT NULL ,
fail_retry_interval int DEFAULT NULL ,
timeout_flag int DEFAULT NULL ,
timeout_notify_strategy int DEFAULT NULL ,
timeout int DEFAULT '0' ,
delay_time int DEFAULT '0' ,
resource_ids text ,
operator int DEFAULT NULL ,
task_group_id int DEFAULT NULL,
task_group_priority int DEFAULT '0',
operate_time timestamp DEFAULT NULL ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
create index idx_task_definition_log_code_version on t_ds_task_definition_log (code,version);
create index idx_task_definition_log_project_code on t_ds_task_definition_log (project_code);
--
-- Table structure for table t_ds_process_task_relation
--
DROP TABLE IF EXISTS t_ds_process_task_relation;
CREATE TABLE t_ds_process_task_relation (
id int NOT NULL ,
name varchar(255) DEFAULT NULL ,
project_code bigint DEFAULT NULL ,
process_definition_code bigint DEFAULT NULL ,
process_definition_version int DEFAULT NULL ,
pre_task_code bigint DEFAULT NULL ,
pre_task_version int DEFAULT '0' ,
post_task_code bigint DEFAULT NULL ,
post_task_version int DEFAULT '0' ,
condition_type int DEFAULT NULL ,
condition_params text ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
create index process_task_relation_idx_project_code_process_definition_code on t_ds_process_task_relation (project_code,process_definition_code);
--
-- Table structure for table t_ds_process_task_relation_log
--
DROP TABLE IF EXISTS t_ds_process_task_relation_log;
CREATE TABLE t_ds_process_task_relation_log (
id int NOT NULL ,
name varchar(255) DEFAULT NULL ,
project_code bigint DEFAULT NULL ,
process_definition_code bigint DEFAULT NULL ,
process_definition_version int DEFAULT NULL ,
pre_task_code bigint DEFAULT NULL ,
pre_task_version int DEFAULT '0' ,
post_task_code bigint DEFAULT NULL ,
post_task_version int DEFAULT '0' ,
condition_type int DEFAULT NULL ,
condition_params text ,
operator int DEFAULT NULL ,
operate_time timestamp DEFAULT NULL ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
create index process_task_relation_log_idx_project_code_process_definition_code on t_ds_process_task_relation_log (project_code,process_definition_code);
--
-- Table structure for table t_ds_process_instance
--
DROP TABLE IF EXISTS t_ds_process_instance;
CREATE TABLE t_ds_process_instance (
id int NOT NULL ,
name varchar(255) DEFAULT NULL ,
process_definition_code bigint DEFAULT NULL ,
process_definition_version int DEFAULT NULL ,
state int DEFAULT NULL ,
recovery int DEFAULT NULL ,
start_time timestamp DEFAULT NULL ,
end_time timestamp DEFAULT NULL ,
run_times int DEFAULT NULL ,
host varchar(135) DEFAULT NULL ,
command_type int DEFAULT NULL ,
command_param text ,
task_depend_type int DEFAULT NULL ,
max_try_times int DEFAULT '0' ,
failure_strategy int DEFAULT '0' ,
warning_type int DEFAULT '0' ,
warning_group_id int DEFAULT NULL ,
schedule_time timestamp DEFAULT NULL ,
command_start_time timestamp DEFAULT NULL ,
global_params text ,
process_instance_json text ,
flag int DEFAULT '1' ,
update_time timestamp NULL ,
is_sub_process int DEFAULT '0' ,
executor_id int NOT NULL ,
history_cmd text ,
dependence_schedule_times text ,
process_instance_priority int DEFAULT NULL ,
worker_group varchar(64) ,
environment_code bigint DEFAULT '-1',
timeout int DEFAULT '0' ,
tenant_id int NOT NULL DEFAULT '-1' ,
var_pool text ,
dry_run int DEFAULT '0' ,
next_process_instance_id int DEFAULT '0',
restart_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
create index process_instance_index on t_ds_process_instance (process_definition_code,id);
create index start_time_index on t_ds_process_instance (start_time,end_time);
--
-- Table structure for table t_ds_project
--
DROP TABLE IF EXISTS t_ds_project;
CREATE TABLE t_ds_project (
id int NOT NULL ,
name varchar(100) DEFAULT NULL ,
code bigint NOT NULL,
description varchar(200) DEFAULT NULL ,
user_id int DEFAULT NULL ,
flag int DEFAULT '1' ,
create_time timestamp DEFAULT CURRENT_TIMESTAMP ,
update_time timestamp DEFAULT CURRENT_TIMESTAMP ,
PRIMARY KEY (id)
) ;
create index user_id_index on t_ds_project (user_id);
--
-- Table structure for table t_ds_queue
--
DROP TABLE IF EXISTS t_ds_queue;
CREATE TABLE t_ds_queue (
id int NOT NULL ,
queue_name varchar(64) DEFAULT NULL ,
queue varchar(64) DEFAULT NULL ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
);
--
-- Table structure for table t_ds_relation_datasource_user
--
DROP TABLE IF EXISTS t_ds_relation_datasource_user;
CREATE TABLE t_ds_relation_datasource_user (
id int NOT NULL ,
user_id int NOT NULL ,
datasource_id int DEFAULT NULL ,
perm int DEFAULT '1' ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
--
-- Table structure for table t_ds_relation_process_instance
--
DROP TABLE IF EXISTS t_ds_relation_process_instance;
CREATE TABLE t_ds_relation_process_instance (
id int NOT NULL ,
parent_process_instance_id int DEFAULT NULL ,
parent_task_instance_id int DEFAULT NULL ,
process_instance_id int DEFAULT NULL ,
PRIMARY KEY (id)
) ;
--
-- Table structure for table t_ds_relation_project_user
--
DROP TABLE IF EXISTS t_ds_relation_project_user;
CREATE TABLE t_ds_relation_project_user (
id int NOT NULL ,
user_id int NOT NULL ,
project_id int DEFAULT NULL ,
perm int DEFAULT '1' ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
create index relation_project_user_id_index on t_ds_relation_project_user (user_id);
--
-- Table structure for table t_ds_relation_resources_user
--
DROP TABLE IF EXISTS t_ds_relation_resources_user;
CREATE TABLE t_ds_relation_resources_user (
id int NOT NULL ,
user_id int NOT NULL ,
resources_id int DEFAULT NULL ,
perm int DEFAULT '1' ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
--
-- Table structure for table t_ds_relation_udfs_user
--
DROP TABLE IF EXISTS t_ds_relation_udfs_user;
CREATE TABLE t_ds_relation_udfs_user (
id int NOT NULL ,
user_id int NOT NULL ,
udf_id int DEFAULT NULL ,
perm int DEFAULT '1' ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
--
-- Table structure for table t_ds_resources
--
DROP TABLE IF EXISTS t_ds_resources;
CREATE TABLE t_ds_resources (
id int NOT NULL ,
alias varchar(64) DEFAULT NULL ,
file_name varchar(64) DEFAULT NULL ,
description varchar(255) DEFAULT NULL ,
user_id int DEFAULT NULL ,
type int DEFAULT NULL ,
size bigint DEFAULT NULL ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
pid int,
full_name varchar(64),
is_directory boolean DEFAULT FALSE,
PRIMARY KEY (id),
CONSTRAINT t_ds_resources_un UNIQUE (full_name, type)
) ;
--
-- Table structure for table t_ds_schedules
--
DROP TABLE IF EXISTS t_ds_schedules;
CREATE TABLE t_ds_schedules (
id int NOT NULL ,
process_definition_code bigint NOT NULL ,
start_time timestamp NOT NULL ,
end_time timestamp NOT NULL ,
timezone_id varchar(40) default NULL ,
crontab varchar(255) NOT NULL ,
failure_strategy int NOT NULL ,
user_id int NOT NULL ,
release_state int NOT NULL ,
warning_type int NOT NULL ,
warning_group_id int DEFAULT NULL ,
process_instance_priority int DEFAULT NULL ,
worker_group varchar(64),
environment_code bigint DEFAULT '-1',
create_time timestamp NOT NULL ,
update_time timestamp NOT NULL ,
PRIMARY KEY (id)
);
--
-- Table structure for table t_ds_session
--
DROP TABLE IF EXISTS t_ds_session;
CREATE TABLE t_ds_session (
id varchar(64) NOT NULL ,
user_id int DEFAULT NULL ,
ip varchar(45) DEFAULT NULL ,
last_login_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
);
--
-- Table structure for table t_ds_task_instance
--
DROP TABLE IF EXISTS t_ds_task_instance;
CREATE TABLE t_ds_task_instance (
id int NOT NULL ,
name varchar(255) DEFAULT NULL ,
task_type varchar(50) DEFAULT NULL ,
task_code bigint NOT NULL,
task_definition_version int DEFAULT NULL ,
process_instance_id int DEFAULT NULL ,
state int DEFAULT NULL ,
submit_time timestamp DEFAULT NULL ,
start_time timestamp DEFAULT NULL ,
end_time timestamp DEFAULT NULL ,
host varchar(135) DEFAULT NULL ,
execute_path varchar(200) DEFAULT NULL ,
log_path varchar(200) DEFAULT NULL ,
alert_flag int DEFAULT NULL ,
retry_times int DEFAULT '0' ,
pid int DEFAULT NULL ,
app_link text ,
task_params text ,
flag int DEFAULT '1' ,
retry_interval int DEFAULT NULL ,
max_retry_times int DEFAULT NULL ,
task_instance_priority int DEFAULT NULL ,
worker_group varchar(64),
environment_code bigint DEFAULT '-1',
environment_config text,
executor_id int DEFAULT NULL ,
first_submit_time timestamp DEFAULT NULL ,
delay_time int DEFAULT '0' ,
task_group_id int DEFAULT NULL,
var_pool text ,
dry_run int DEFAULT '0' ,
PRIMARY KEY (id),
CONSTRAINT foreign_key_instance_id FOREIGN KEY(process_instance_id) REFERENCES t_ds_process_instance(id) ON DELETE CASCADE
) ;
create index idx_task_instance_code_version on t_ds_task_instance (task_code, task_definition_version);
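--
-- Note: foreign_key_instance_id above is declared ON DELETE CASCADE, so removing a
-- process instance also removes its task instances. Illustrative only (commented
-- out, hypothetical id):
--
--   DELETE FROM t_ds_process_instance WHERE id = 100;
--   -- cascades to every t_ds_task_instance row with process_instance_id = 100
--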
--
-- Table structure for table t_ds_tenant
--
DROP TABLE IF EXISTS t_ds_tenant;
CREATE TABLE t_ds_tenant (
id int NOT NULL ,
tenant_code varchar(64) DEFAULT NULL ,
description varchar(255) DEFAULT NULL ,
queue_id int DEFAULT NULL ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
--
-- Table structure for table t_ds_udfs
--
DROP TABLE IF EXISTS t_ds_udfs;
CREATE TABLE t_ds_udfs (
id int NOT NULL ,
user_id int NOT NULL ,
func_name varchar(100) NOT NULL ,
class_name varchar(255) NOT NULL ,
type int NOT NULL ,
arg_types varchar(255) DEFAULT NULL ,
database varchar(255) DEFAULT NULL ,
description varchar(255) DEFAULT NULL ,
resource_id int NOT NULL ,
resource_name varchar(255) NOT NULL ,
create_time timestamp NOT NULL ,
update_time timestamp NOT NULL ,
PRIMARY KEY (id)
) ;
--
-- Table structure for table t_ds_user
--
DROP TABLE IF EXISTS t_ds_user;
CREATE TABLE t_ds_user (
id int NOT NULL ,
user_name varchar(64) DEFAULT NULL ,
user_password varchar(64) DEFAULT NULL ,
user_type int DEFAULT NULL ,
email varchar(64) DEFAULT NULL ,
phone varchar(11) DEFAULT NULL ,
tenant_id int DEFAULT NULL ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
queue varchar(64) DEFAULT NULL ,
state int DEFAULT 1 ,
PRIMARY KEY (id)
);
comment on column t_ds_user.state is 'state 0:disable 1:enable';
--
-- Table structure for table t_ds_version
--
DROP TABLE IF EXISTS t_ds_version;
CREATE TABLE t_ds_version (
id int NOT NULL ,
version varchar(200) NOT NULL,
PRIMARY KEY (id)
) ;
create index version_index on t_ds_version(version);
--
-- Table structure for table t_ds_worker_group
--
DROP TABLE IF EXISTS t_ds_worker_group;
CREATE TABLE t_ds_worker_group (
id bigint NOT NULL ,
name varchar(255) NOT NULL ,
addr_list text DEFAULT NULL ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id) ,
CONSTRAINT name_unique UNIQUE (name)
) ;
--
-- Table structure for table t_ds_worker_server
--
DROP TABLE IF EXISTS t_ds_worker_server;
CREATE TABLE t_ds_worker_server (
id int NOT NULL ,
host varchar(45) DEFAULT NULL ,
port int DEFAULT NULL ,
zk_directory varchar(64) DEFAULT NULL ,
res_info varchar(255) DEFAULT NULL ,
create_time timestamp DEFAULT NULL ,
last_heartbeat_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
DROP SEQUENCE IF EXISTS t_ds_access_token_id_sequence;
CREATE SEQUENCE t_ds_access_token_id_sequence;
ALTER TABLE t_ds_access_token ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_access_token_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_alert_id_sequence;
CREATE SEQUENCE t_ds_alert_id_sequence;
ALTER TABLE t_ds_alert ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_alert_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_alertgroup_id_sequence;
CREATE SEQUENCE t_ds_alertgroup_id_sequence;
ALTER TABLE t_ds_alertgroup ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_alertgroup_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_command_id_sequence;
CREATE SEQUENCE t_ds_command_id_sequence;
ALTER TABLE t_ds_command ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_command_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_datasource_id_sequence;
CREATE SEQUENCE t_ds_datasource_id_sequence;
ALTER TABLE t_ds_datasource ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_datasource_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_process_definition_id_sequence;
CREATE SEQUENCE t_ds_process_definition_id_sequence;
ALTER TABLE t_ds_process_definition ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_definition_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_process_definition_log_id_sequence;
CREATE SEQUENCE t_ds_process_definition_log_id_sequence;
ALTER TABLE t_ds_process_definition_log ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_definition_log_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_task_definition_id_sequence;
CREATE SEQUENCE t_ds_task_definition_id_sequence;
ALTER TABLE t_ds_task_definition ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_task_definition_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_task_definition_log_id_sequence;
CREATE SEQUENCE t_ds_task_definition_log_id_sequence;
ALTER TABLE t_ds_task_definition_log ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_task_definition_log_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_process_task_relation_id_sequence;
CREATE SEQUENCE t_ds_process_task_relation_id_sequence;
ALTER TABLE t_ds_process_task_relation ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_task_relation_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_process_task_relation_log_id_sequence;
CREATE SEQUENCE t_ds_process_task_relation_log_id_sequence;
ALTER TABLE t_ds_process_task_relation_log ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_task_relation_log_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_process_instance_id_sequence;
CREATE SEQUENCE t_ds_process_instance_id_sequence;
ALTER TABLE t_ds_process_instance ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_instance_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_project_id_sequence;
CREATE SEQUENCE t_ds_project_id_sequence;
ALTER TABLE t_ds_project ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_project_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_queue_id_sequence;
CREATE SEQUENCE t_ds_queue_id_sequence;
ALTER TABLE t_ds_queue ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_queue_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_relation_datasource_user_id_sequence;
CREATE SEQUENCE t_ds_relation_datasource_user_id_sequence;
ALTER TABLE t_ds_relation_datasource_user ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_datasource_user_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_relation_process_instance_id_sequence;
CREATE SEQUENCE t_ds_relation_process_instance_id_sequence;
ALTER TABLE t_ds_relation_process_instance ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_process_instance_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_relation_project_user_id_sequence;
CREATE SEQUENCE t_ds_relation_project_user_id_sequence;
ALTER TABLE t_ds_relation_project_user ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_project_user_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_relation_resources_user_id_sequence;
CREATE SEQUENCE t_ds_relation_resources_user_id_sequence;
ALTER TABLE t_ds_relation_resources_user ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_resources_user_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_relation_udfs_user_id_sequence;
CREATE SEQUENCE t_ds_relation_udfs_user_id_sequence;
ALTER TABLE t_ds_relation_udfs_user ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_udfs_user_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_resources_id_sequence;
CREATE SEQUENCE t_ds_resources_id_sequence;
ALTER TABLE t_ds_resources ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_resources_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_schedules_id_sequence;
CREATE SEQUENCE t_ds_schedules_id_sequence;
ALTER TABLE t_ds_schedules ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_schedules_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_task_instance_id_sequence;
CREATE SEQUENCE t_ds_task_instance_id_sequence;
ALTER TABLE t_ds_task_instance ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_task_instance_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_tenant_id_sequence;
CREATE SEQUENCE t_ds_tenant_id_sequence;
ALTER TABLE t_ds_tenant ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_tenant_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_udfs_id_sequence;
CREATE SEQUENCE t_ds_udfs_id_sequence;
ALTER TABLE t_ds_udfs ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_udfs_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_user_id_sequence;
CREATE SEQUENCE t_ds_user_id_sequence;
ALTER TABLE t_ds_user ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_user_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_version_id_sequence;
CREATE SEQUENCE t_ds_version_id_sequence;
ALTER TABLE t_ds_version ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_version_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_worker_group_id_sequence;
CREATE SEQUENCE t_ds_worker_group_id_sequence;
ALTER TABLE t_ds_worker_group ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_worker_group_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_worker_server_id_sequence;
CREATE SEQUENCE t_ds_worker_server_id_sequence;
ALTER TABLE t_ds_worker_server ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_worker_server_id_sequence');
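--
-- With the sequences above wired in as column defaults, inserts can omit the id
-- column entirely. A minimal, commented-out sketch (values are illustrative only,
-- not part of the seed data):
--
--   INSERT INTO t_ds_queue(queue_name, queue, create_time, update_time)
--   VALUES ('example_queue', 'example_queue', now(), now());
--   -- id is filled from t_ds_queue_id_sequence via NEXTVAL
--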
-- Records of t_ds_user, user: admin, password: dolphinscheduler123
INSERT INTO t_ds_user(user_name, user_password, user_type, email, phone, tenant_id, state, create_time, update_time)
VALUES ('admin', '7ad2410b2f4c074479a8937a28a22b8f', '0', 'xxx@qq.com', '', '0', 1, '2018-03-27 15:48:50', '2018-10-24 17:40:22');
-- Records of t_ds_alertgroup, default admin warning group
INSERT INTO t_ds_alertgroup(alert_instance_ids, create_user_id, group_name, description, create_time, update_time)
VALUES ('1,2', 1, 'default admin warning group', 'default admin warning group', '2018-11-29 10:20:39', '2018-11-29 10:20:39');
-- Records of t_ds_queue, default queue name: default
INSERT INTO t_ds_queue(queue_name, queue, create_time, update_time)
VALUES ('default', 'default', '2018-11-29 10:22:33', '2018-11-29 10:22:33');
-- Records of t_ds_version, default version: 1.4.0
INSERT INTO t_ds_version(version) VALUES ('1.4.0');
--
-- Table structure for table t_ds_plugin_define
--
DROP TABLE IF EXISTS t_ds_plugin_define;
CREATE TABLE t_ds_plugin_define (
id serial NOT NULL,
plugin_name varchar(100) NOT NULL,
plugin_type varchar(100) NOT NULL,
plugin_params text NULL,
create_time timestamp NULL,
update_time timestamp NULL,
CONSTRAINT t_ds_plugin_define_pk PRIMARY KEY (id),
CONSTRAINT t_ds_plugin_define_un UNIQUE (plugin_name, plugin_type)
);
--
-- Table structure for table t_ds_alert_plugin_instance
--
DROP TABLE IF EXISTS t_ds_alert_plugin_instance;
CREATE TABLE t_ds_alert_plugin_instance (
id serial NOT NULL,
plugin_define_id int4 NOT NULL,
plugin_instance_params text NULL,
create_time timestamp NULL,
update_time timestamp NULL,
instance_name varchar(200) NULL,
CONSTRAINT t_ds_alert_plugin_instance_pk PRIMARY KEY (id)
);
--
-- Table structure for table t_ds_dq_comparison_type
--
DROP TABLE IF EXISTS t_ds_dq_comparison_type;
CREATE TABLE t_ds_dq_comparison_type (
id serial NOT NULL,
"type" varchar NOT NULL,
execute_sql varchar NULL,
output_table varchar NULL,
"name" varchar NULL,
create_time timestamp NULL,
update_time timestamp NULL,
is_inner_source bool NULL,
CONSTRAINT t_ds_dq_comparison_type_pk PRIMARY KEY (id)
);
INSERT INTO t_ds_dq_comparison_type
(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
VALUES(1, 'FixValue', NULL, NULL, NULL, '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false);
INSERT INTO t_ds_dq_comparison_type
(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
VALUES(2, 'DailyFluctuation', 'select round(avg(statistics_value),2) as day_avg from t_ds_dq_task_statistics_value where data_time >=date_trunc(''DAY'', ${data_time}) and data_time < date_add(date_trunc(''day'', ${data_time}),1) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'day_range', 'day_range.day_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
INSERT INTO t_ds_dq_comparison_type
(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
VALUES(3, 'WeeklyFluctuation', 'select round(avg(statistics_value),2) as week_avg from t_ds_dq_task_statistics_value where data_time >= date_trunc(''WEEK'', ${data_time}) and data_time <date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'week_range', 'week_range.week_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
INSERT INTO t_ds_dq_comparison_type
(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
VALUES(4, 'MonthlyFluctuation', 'select round(avg(statistics_value),2) as month_avg from t_ds_dq_task_statistics_value where data_time >= date_trunc(''MONTH'', ${data_time}) and data_time <date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'month_range', 'month_range.month_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
INSERT INTO t_ds_dq_comparison_type
(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
VALUES(5, 'Last7DayFluctuation', 'select round(avg(statistics_value),2) as last_7_avg from t_ds_dq_task_statistics_value where data_time >= date_add(date_trunc(''day'', ${data_time}),-7) and data_time <date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'last_seven_days', 'last_seven_days.last_7_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
INSERT INTO t_ds_dq_comparison_type
(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
VALUES(6, 'Last30DayFluctuation', 'select round(avg(statistics_value),2) as last_30_avg from t_ds_dq_task_statistics_value where data_time >= date_add(date_trunc(''day'', ${data_time}),-30) and data_time < date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'last_thirty_days', 'last_thirty_days.last_30_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
INSERT INTO t_ds_dq_comparison_type
(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
VALUES(7, 'SrcTableTotalRows', 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 'total_count.total', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false);
INSERT INTO t_ds_dq_comparison_type
(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
VALUES(8, 'TargetTableTotalRows', 'SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 'total_count.total', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false);
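--
-- The execute_sql templates above use ${...} placeholders that are substituted
-- when a data-quality task runs. A minimal sketch of what 'SrcTableTotalRows'
-- expands to, assuming a hypothetical source table and filter:
--
--   SELECT COUNT(*) AS total FROM ods_user WHERE (dt = '2021-06-30')
--   -- ${src_table} -> ods_user, ${src_filter} -> dt = '2021-06-30'
--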
--
-- Table structure for table t_ds_dq_execute_result
--
DROP TABLE IF EXISTS t_ds_dq_execute_result;
CREATE TABLE t_ds_dq_execute_result (
id serial NOT NULL,
process_definition_id int4 NULL,
process_instance_id int4 NULL,
task_instance_id int4 NULL,
rule_type int4 NULL,
rule_name varchar(255) DEFAULT NULL,
statistics_value float8 NULL,
comparison_value float8 NULL,
check_type int4 NULL,
threshold float8 NULL,
"operator" int4 NULL,
failure_strategy int4 NULL,
state int4 NULL,
user_id int4 NULL,
create_time timestamp NULL,
update_time timestamp NULL,
comparison_type int4 NULL,
error_output_path text NULL,
CONSTRAINT t_ds_dq_execute_result_pk PRIMARY KEY (id)
);
--
-- Table structure for table t_ds_dq_rule
--
DROP TABLE IF EXISTS t_ds_dq_rule;
CREATE TABLE t_ds_dq_rule (
id serial NOT NULL,
"name" varchar(100) DEFAULT NULL,
"type" int4 NULL,
user_id int4 NULL,
create_time timestamp NULL,
update_time timestamp NULL,
CONSTRAINT t_ds_dq_rule_pk PRIMARY KEY (id)
);
INSERT INTO t_ds_dq_rule
(id, "name", "type", user_id, create_time, update_time)
VALUES(1, '$t(null_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
INSERT INTO t_ds_dq_rule
(id, "name", "type", user_id, create_time, update_time)
VALUES(2, '$t(custom_sql)', 1, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
INSERT INTO t_ds_dq_rule
(id, "name", "type", user_id, create_time, update_time)
VALUES(3, '$t(multi_table_accuracy)', 2, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
INSERT INTO t_ds_dq_rule
(id, "name", "type", user_id, create_time, update_time)
VALUES(4, '$t(multi_table_value_comparison)', 3, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
INSERT INTO t_ds_dq_rule
(id, "name", "type", user_id, create_time, update_time)
VALUES(5, '$t(field_length_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
INSERT INTO t_ds_dq_rule
(id, "name", "type", user_id, create_time, update_time)
VALUES(6, '$t(uniqueness_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
INSERT INTO t_ds_dq_rule
(id, "name", "type", user_id, create_time, update_time)
VALUES(7, '$t(regexp_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
INSERT INTO t_ds_dq_rule
(id, "name", "type", user_id, create_time, update_time)
VALUES(8, '$t(timeliness_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
INSERT INTO t_ds_dq_rule
(id, "name", "type", user_id, create_time, update_time)
VALUES(9, '$t(enumeration_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
INSERT INTO t_ds_dq_rule
(id, "name", "type", user_id, create_time, update_time)
VALUES(10, '$t(table_count_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
--
-- Table structure for table t_ds_dq_rule_execute_sql
--
DROP TABLE IF EXISTS t_ds_dq_rule_execute_sql;
CREATE TABLE t_ds_dq_rule_execute_sql (
id serial NOT NULL,
"index" int4 NULL,
"sql" text NULL,
table_alias varchar(255) DEFAULT NULL,
"type" int4 NULL,
create_time timestamp NULL,
update_time timestamp NULL,
is_error_output_sql bool NULL,
CONSTRAINT t_ds_dq_rule_execute_sql_pk PRIMARY KEY (id)
);
INSERT INTO t_ds_dq_rule_execute_sql
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
VALUES(1, 1, 'SELECT COUNT(*) AS nulls FROM null_items', 'null_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_execute_sql
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
VALUES(2, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 2, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_execute_sql
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
VALUES(3, 1, 'SELECT COUNT(*) AS miss from miss_items', 'miss_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_execute_sql
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
VALUES(4, 1, 'SELECT COUNT(*) AS valids FROM invalid_length_items', 'invalid_length_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_execute_sql
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
VALUES(5, 1, 'SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 2, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_execute_sql
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
VALUES(6, 1, 'SELECT ${src_field} FROM ${src_table} group by ${src_field} having count(*) > 1', 'duplicate_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_execute_sql
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
VALUES(7, 1, 'SELECT COUNT(*) AS duplicates FROM duplicate_items', 'duplicate_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_execute_sql
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
VALUES(8, 1, 'SELECT ${src_table}.* FROM (SELECT * FROM ${src_table} WHERE (${src_filter})) ${src_table} LEFT JOIN (SELECT * FROM ${target_table} WHERE (${target_filter})) ${target_table} ON ${on_clause} WHERE ${where_clause}', 'miss_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_execute_sql
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
VALUES(9, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} not regexp ''${regexp_pattern}'') AND (${src_filter}) ', 'regexp_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_execute_sql
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
VALUES(10, 1, 'SELECT COUNT(*) AS regexps FROM regexp_items', 'regexp_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_execute_sql
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
VALUES(11, 1, 'SELECT * FROM ${src_table} WHERE (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${deadline}'', ''${datetime_format}'') <= 0) AND (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${begin_time}'', ''${datetime_format}'') >= 0) AND (${src_filter}) ', 'timeliness_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_execute_sql
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
VALUES(12, 1, 'SELECT COUNT(*) AS timeliness FROM timeliness_items', 'timeliness_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_execute_sql
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
VALUES(13, 1, 'SELECT * FROM ${src_table} where (${src_field} not in ( ${enum_list} ) or ${src_field} is null) AND (${src_filter}) ', 'enum_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_execute_sql
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
VALUES(14, 1, 'SELECT COUNT(*) AS enums FROM enum_items', 'enum_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_execute_sql
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
VALUES(15, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'table_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_execute_sql
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
VALUES(16, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} is null or ${src_field} = '''') AND (${src_filter})', 'null_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_execute_sql
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
VALUES(17, 1, 'SELECT * FROM ${src_table} WHERE (length(${src_field}) ${logic_operator} ${field_length}) AND (${src_filter})', 'invalid_length_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
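--
-- Taken together, a rule chains these statements: a type-0 statement selects the
-- problem rows (registered under its table_alias) and a type-1 statement counts
-- them. For the null check (statements 16 and 1 above), the expanded pair would
-- look roughly like this, with hypothetical table/column names:
--
--   SELECT * FROM ods_user WHERE (email is null or email = '') AND (dt = '2021-06-30');  -- null_items
--   SELECT COUNT(*) AS nulls FROM null_items;                                            -- null_count
--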
--
-- Table structure for table t_ds_dq_rule_input_entry
--
DROP TABLE IF EXISTS t_ds_dq_rule_input_entry;
CREATE TABLE t_ds_dq_rule_input_entry (
id serial NOT NULL,
field varchar(255) DEFAULT NULL,
"type" varchar(255) DEFAULT NULL,
title varchar(255) DEFAULT NULL,
value varchar(255) DEFAULT NULL,
"options" text DEFAULT NULL,
placeholder varchar(255) DEFAULT NULL,
option_source_type int4 NULL,
value_type int4 NULL,
input_type int4 NULL,
is_show int2 NULL DEFAULT '1'::smallint,
can_edit int2 NULL DEFAULT '1'::smallint,
is_emit int2 NULL DEFAULT '0'::smallint,
is_validate int2 NULL DEFAULT '0'::smallint,
create_time timestamp NULL,
update_time timestamp NULL,
CONSTRAINT t_ds_dq_rule_input_entry_pk PRIMARY KEY (id)
);
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(1, 'src_connector_type', 'select', '$t(src_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'please select source connector type', 2, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(2, 'src_datasource_id', 'select', '$t(src_datasource_id)', '', NULL, 'please select source datasource id', 1, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(3, 'src_table', 'select', '$t(src_table)', NULL, NULL, 'Please enter source table name', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(4, 'src_filter', 'input', '$t(src_filter)', NULL, NULL, 'Please enter filter expression', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(5, 'src_field', 'select', '$t(src_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(6, 'statistics_name', 'input', '$t(statistics_name)', NULL, NULL, 'Please enter statistics name, the alias in statistics execute sql', 0, 0, 1, 0, 0, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(7, 'check_type', 'select', '$t(check_type)', '0', '[{"label":"Expected - Actual","value":"0"},{"label":"Actual - Expected","value":"1"},{"label":"Actual / Expected","value":"2"},{"label":"(Expected - Actual) / Expected","value":"3"}]', 'please select check type', 0, 0, 3, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(8, 'operator', 'select', '$t(operator)', '0', '[{"label":"=","value":"0"},{"label":"<","value":"1"},{"label":"<=","value":"2"},{"label":">","value":"3"},{"label":">=","value":"4"},{"label":"!=","value":"5"}]', 'please select operator', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(9, 'threshold', 'input', '$t(threshold)', NULL, NULL, 'Please enter threshold, number is needed', 0, 2, 3, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(10, 'failure_strategy', 'select', '$t(failure_strategy)', '0', '[{"label":"Alert","value":"0"},{"label":"Block","value":"1"}]', 'please select failure strategy', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(11, 'target_connector_type', 'select', '$t(target_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'Please select target connector type', 2, 0, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(12, 'target_datasource_id', 'select', '$t(target_datasource_id)', '', NULL, 'Please select target datasource', 1, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(13, 'target_table', 'select', '$t(target_table)', NULL, NULL, 'Please enter target table', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(14, 'target_filter', 'input', '$t(target_filter)', NULL, NULL, 'Please enter target filter expression', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(15, 'mapping_columns', 'group', '$t(mapping_columns)', NULL, '[{"field":"src_field","props":{"placeholder":"Please input src field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"src_field"},{"field":"operator","props":{"placeholder":"Please input operator","rows":0,"disabled":false,"size":"small"},"type":"input","title":"operator"},{"field":"target_field","props":{"placeholder":"Please input target field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"target_field"}]', 'please enter mapping columns', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(16, 'statistics_execute_sql', 'textarea', '$t(statistics_execute_sql)', NULL, NULL, 'Please enter statistics execute sql', 0, 3, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(17, 'comparison_name', 'input', '$t(comparison_name)', NULL, NULL, 'Please enter comparison name, the alias in comparison execute sql', 0, 0, 0, 0, 0, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(18, 'comparison_execute_sql', 'textarea', '$t(comparison_execute_sql)', NULL, NULL, 'Please enter comparison execute sql', 0, 3, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(19, 'comparison_type', 'select', '$t(comparison_type)', '', NULL, 'Please enter comparison title', 3, 0, 2, 1, 0, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(20, 'writer_connector_type', 'select', '$t(writer_connector_type)', '', '[{"label":"MYSQL","value":"0"},{"label":"POSTGRESQL","value":"1"}]', 'please select writer connector type', 0, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(21, 'writer_datasource_id', 'select', '$t(writer_datasource_id)', '', NULL, 'please select writer datasource id', 1, 2, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(22, 'target_field', 'select', '$t(target_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(23, 'field_length', 'input', '$t(field_length)', NULL, NULL, 'Please enter length limit', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(24, 'logic_operator', 'select', '$t(logic_operator)', '=', '[{"label":"=","value":"="},{"label":"<","value":"<"},{"label":"<=","value":"<="},{"label":">","value":">"},{"label":">=","value":">="},{"label":"<>","value":"<>"}]', 'please select logic operator', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(25, 'regexp_pattern', 'input', '$t(regexp_pattern)', NULL, NULL, 'Please enter regexp pattern', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(26, 'deadline', 'input', '$t(deadline)', NULL, NULL, 'Please enter deadline', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(27, 'datetime_format', 'input', '$t(datetime_format)', NULL, NULL, 'Please enter datetime format', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(28, 'enum_list', 'input', '$t(enum_list)', NULL, NULL, 'Please enter enumeration', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_dq_rule_input_entry
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
VALUES(29, 'begin_time', 'input', '$t(begin_time)', NULL, NULL, 'Please enter begin time', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
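--
-- Each entry above describes one form field of the rule editor; "options" carries
-- select choices as a JSON array. A hedged, read-only query listing the visible
-- fields of a rule's form, joining through t_ds_relation_rule_input_entry
-- (defined further below):
--
--   SELECT e.field, e."type", e.title
--   FROM t_ds_relation_rule_input_entry rel
--   JOIN t_ds_dq_rule_input_entry e ON e.id = rel.rule_input_entry_id
--   WHERE rel.rule_id = 1 AND e.is_show = 1
--   ORDER BY rel."index";
--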
--
-- Table structure for table t_ds_dq_task_statistics_value
--
DROP TABLE IF EXISTS t_ds_dq_task_statistics_value;
CREATE TABLE t_ds_dq_task_statistics_value (
id serial NOT NULL,
process_definition_id int4 NOT NULL,
task_instance_id int4 NULL,
rule_id int4 NOT NULL,
unique_code varchar NOT NULL,
statistics_name varchar NULL,
statistics_value float8 NULL,
data_time timestamp(0) NULL,
create_time timestamp(0) NULL,
update_time timestamp(0) NULL,
CONSTRAINT t_ds_dq_task_statistics_value_pk PRIMARY KEY (id)
);
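--
-- This table feeds the fluctuation comparison types above (ids 2-6), which average
-- historical statistics_value rows by unique_code and statistics_name. A
-- simplified, hedged sketch with hypothetical values (the real templates also
-- restrict data_time to the relevant window):
--
--   SELECT round(avg(statistics_value), 2) AS day_avg
--   FROM t_ds_dq_task_statistics_value
--   WHERE unique_code = 'abc123' AND statistics_name = 'null_count.nulls';
--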
--
-- Table structure for table t_ds_relation_rule_execute_sql
--
DROP TABLE IF EXISTS t_ds_relation_rule_execute_sql;
CREATE TABLE t_ds_relation_rule_execute_sql (
id serial NOT NULL,
rule_id int4 NULL,
execute_sql_id int4 NULL,
create_time timestamp NULL,
update_time timestamp NULL,
CONSTRAINT t_ds_relation_rule_execute_sql_pk PRIMARY KEY (id)
);
INSERT INTO t_ds_relation_rule_execute_sql
(id, rule_id, execute_sql_id, create_time, update_time)
VALUES(1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_execute_sql
(id, rule_id, execute_sql_id, create_time, update_time)
VALUES(3, 5, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_execute_sql
(id, rule_id, execute_sql_id, create_time, update_time)
VALUES(2, 3, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_execute_sql
(id, rule_id, execute_sql_id, create_time, update_time)
VALUES(4, 3, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_execute_sql
(id, rule_id, execute_sql_id, create_time, update_time)
VALUES(5, 6, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_execute_sql
(id, rule_id, execute_sql_id, create_time, update_time)
VALUES(6, 6, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_execute_sql
(id, rule_id, execute_sql_id, create_time, update_time)
VALUES(7, 7, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_execute_sql
(id, rule_id, execute_sql_id, create_time, update_time)
VALUES(8, 7, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_execute_sql
(id, rule_id, execute_sql_id, create_time, update_time)
VALUES(9, 8, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_execute_sql
(id, rule_id, execute_sql_id, create_time, update_time)
VALUES(10, 8, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_execute_sql
(id, rule_id, execute_sql_id, create_time, update_time)
VALUES(11, 9, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_execute_sql
(id, rule_id, execute_sql_id, create_time, update_time)
VALUES(12, 9, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_execute_sql
(id, rule_id, execute_sql_id, create_time, update_time)
VALUES(13, 10, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_execute_sql
(id, rule_id, execute_sql_id, create_time, update_time)
VALUES(14, 1, 16, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_execute_sql
(id, rule_id, execute_sql_id, create_time, update_time)
VALUES(15, 5, 17, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
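--
-- The rows above are the rule -> execute-sql mapping; e.g. rule 1 ($t(null_check))
-- is linked to execute sqls 1 and 16. A hedged query to inspect a rule's statements:
--
--   SELECT r.name, s."index", s."sql", s.table_alias
--   FROM t_ds_relation_rule_execute_sql rel
--   JOIN t_ds_dq_rule r ON r.id = rel.rule_id
--   JOIN t_ds_dq_rule_execute_sql s ON s.id = rel.execute_sql_id
--   WHERE rel.rule_id = 1;
--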
--
-- Table structure for table t_ds_relation_rule_input_entry
--
DROP TABLE IF EXISTS t_ds_relation_rule_input_entry;
CREATE TABLE t_ds_relation_rule_input_entry (
id serial NOT NULL,
rule_id int4 NULL,
rule_input_entry_id int4 NULL,
values_map text NULL,
"index" int4 NULL,
create_time timestamp NULL,
update_time timestamp NULL,
CONSTRAINT t_ds_relation_rule_input_entry_pk PRIMARY KEY (id)
);
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(1, 1, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(2, 1, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(3, 1, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(4, 1, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(5, 1, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(6, 1, 6, '{"statistics_name":"null_count.nulls"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(7, 1, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(8, 1, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(9, 1, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(10, 1, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(11, 1, 17, '', 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(12, 1, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(13, 2, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(14, 2, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(15, 2, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(16, 2, 6, '{"is_show":"true","can_edit":"true"}', 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(17, 2, 16, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(18, 2, 4, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(19, 2, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(20, 2, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(21, 2, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(22, 2, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(24, 2, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(25, 3, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(26, 3, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(27, 3, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(28, 3, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(29, 3, 11, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(30, 3, 12, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(31, 3, 13, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(32, 3, 14, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(33, 3, 15, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(34, 3, 7, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(35, 3, 8, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(36, 3, 9, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(37, 3, 10, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(38, 3, 17, '{"comparison_name":"total_count.total"}', 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(39, 3, 19, NULL, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(40, 4, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(41, 4, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(42, 4, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(43, 4, 6, '{"is_show":"true","can_edit":"true"}', 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(44, 4, 16, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(45, 4, 11, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(46, 4, 12, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(47, 4, 13, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(48, 4, 17, '{"is_show":"true","can_edit":"true"}', 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(49, 4, 18, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(50, 4, 7, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(51, 4, 8, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(52, 4, 9, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(53, 4, 10, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(62, 3, 6, '{"statistics_name":"miss_count.miss"}', 18, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(63, 5, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(64, 5, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(65, 5, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(66, 5, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(67, 5, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(68, 5, 6, '{"statistics_name":"invalid_length_count.valids"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(69, 5, 24, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(70, 5, 23, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(71, 5, 7, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(72, 5, 8, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(73, 5, 9, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(74, 5, 10, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(75, 5, 17, '', 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(76, 5, 19, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(79, 6, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(80, 6, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(81, 6, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(82, 6, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(83, 6, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(84, 6, 6, '{"statistics_name":"duplicate_count.duplicates"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(85, 6, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(86, 6, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(87, 6, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(88, 6, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(89, 6, 17, '', 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(90, 6, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(93, 7, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(94, 7, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(95, 7, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(96, 7, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(97, 7, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(98, 7, 6, '{"statistics_name":"regexp_count.regexps"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(99, 7, 25, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(100, 7, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(101, 7, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(102, 7, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(103, 7, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(104, 7, 17, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(105, 7, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(108, 8, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(109, 8, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(110, 8, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(111, 8, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(112, 8, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(113, 8, 6, '{"statistics_name":"timeliness_count.timeliness"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(114, 8, 26, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(115, 8, 27, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(116, 8, 7, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(117, 8, 8, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(118, 8, 9, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(119, 8, 10, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(120, 8, 17, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(121, 8, 19, NULL, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(124, 9, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(125, 9, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(126, 9, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(127, 9, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(128, 9, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(129, 9, 6, '{"statistics_name":"enum_count.enums"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(130, 9, 28, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(131, 9, 7, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(132, 9, 8, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(133, 9, 9, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(134, 9, 10, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(135, 9, 17, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(136, 9, 19, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(139, 10, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(140, 10, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(141, 10, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(142, 10, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(143, 10, 6, '{"statistics_name":"table_count.total"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(144, 10, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(145, 10, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(146, 10, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(147, 10, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(148, 10, 17, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(149, 10, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(150, 8, 29, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
--
-- Table structure for table t_ds_environment
--
DROP TABLE IF EXISTS t_ds_environment;
CREATE TABLE t_ds_environment (
id serial NOT NULL,
code bigint NOT NULL,
name varchar(100) DEFAULT NULL,
config text DEFAULT NULL,
description text,
operator int DEFAULT NULL,
create_time timestamp DEFAULT NULL,
update_time timestamp DEFAULT NULL,
PRIMARY KEY (id),
CONSTRAINT environment_name_unique UNIQUE (name),
CONSTRAINT environment_code_unique UNIQUE (code)
);
--
-- Table structure for table t_ds_environment_worker_group_relation
--
DROP TABLE IF EXISTS t_ds_environment_worker_group_relation;
CREATE TABLE t_ds_environment_worker_group_relation (
id serial NOT NULL,
environment_code bigint NOT NULL,
worker_group varchar(255) NOT NULL,
operator int DEFAULT NULL,
create_time timestamp DEFAULT NULL,
update_time timestamp DEFAULT NULL,
PRIMARY KEY (id) ,
CONSTRAINT environment_worker_group_unique UNIQUE (environment_code,worker_group)
);
--
-- Table structure for table t_ds_task_group_queue
--
DROP TABLE IF EXISTS t_ds_task_group_queue;
CREATE TABLE t_ds_task_group_queue (
id serial NOT NULL,
task_id int DEFAULT NULL ,
task_name VARCHAR(100) DEFAULT NULL ,
group_id int DEFAULT NULL ,
process_id int DEFAULT NULL ,
priority int DEFAULT '0' ,
status int DEFAULT '-1' ,
force_start int DEFAULT '0' ,
in_queue int DEFAULT '0' ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
);
--
-- Table structure for table t_ds_task_group
--
DROP TABLE IF EXISTS t_ds_task_group;
CREATE TABLE t_ds_task_group (
id serial NOT NULL,
name varchar(100) DEFAULT NULL ,
description varchar(200) DEFAULT NULL ,
group_size int NOT NULL ,
project_code bigint DEFAULT '0' ,
use_size int DEFAULT '0' ,
user_id int DEFAULT NULL ,
status int DEFAULT '1' ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY(id)
);
-- ----------------------------
-- Table structure for t_ds_audit_log
-- ----------------------------
DROP TABLE IF EXISTS t_ds_audit_log;
CREATE TABLE t_ds_audit_log (
id serial NOT NULL,
user_id int NOT NULL,
resource_type int NOT NULL,
operation int NOT NULL,
time timestamp DEFAULT NULL ,
resource_id int NOT NULL,
PRIMARY KEY (id)
);
--
-- Table structure for table t_ds_k8s
--
DROP TABLE IF EXISTS t_ds_k8s;
CREATE TABLE t_ds_k8s (
id serial NOT NULL,
k8s_name VARCHAR(100) DEFAULT NULL ,
k8s_config text ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
);
--
-- Table structure for table t_ds_k8s_namespace
--
DROP TABLE IF EXISTS t_ds_k8s_namespace;
CREATE TABLE t_ds_k8s_namespace (
id serial NOT NULL,
limits_memory int DEFAULT NULL ,
namespace varchar(100) DEFAULT NULL ,
online_job_num int DEFAULT '0' ,
owner varchar(100) DEFAULT NULL,
pod_replicas int(11) DEFAULT NULL,
pod_request_cpu NUMERIC(13,4) NULL,
pod_request_memory int(11) DEFAULT NULL,
tag varchar(100) DEFAULT NULL,
limits_cpu NUMERIC(13,4) NULL,
k8s varchar(100) DEFAULT NULL,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id) ,
CONSTRAINT k8s_namespace_unique UNIQUE (namespace,k8s)
);
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,398 | [Bug] [SQL] There is a syntax error in the file of dolphinscheduler_postgresql.sql. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
When I used the file 'dolphinscheduler_postgresql.sql' to initialize the database, the process failed, because the DDL script that creates the table 't_ds_k8s_namespace' contains a syntax error: when PostgreSQL is used as the backend database, it does not support the syntax 'int(11)'.
![image](https://user-images.githubusercontent.com/4928204/154208066-b72fcc61-c800-4001-bcc8-6b2b4a9dc4a5.png)
### What you expected to happen
I expect to be able to use the file 'dolphinscheduler_postgresql.sql' to initialize the database successfully.
### How to reproduce
Use the file 'dolphinscheduler_postgresql.sql' to initialize the database and you will see the error.
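For illustration, here is a minimal reproduction against PostgreSQL, plus a compatible rewrite of the affected columns (the corrected types below are an assumed obvious fix, not necessarily the exact committed change):
```sql
-- Fails: int(11) is MySQL syntax; PostgreSQL's integer type takes no length modifier
CREATE TABLE t_ds_k8s_namespace_repro (
    pod_replicas int(11) DEFAULT NULL   -- ERROR: syntax error at or near "("
);

-- Works: drop the length modifier (assumed fix)
CREATE TABLE t_ds_k8s_namespace_repro (
    pod_replicas       int DEFAULT NULL,
    pod_request_memory int DEFAULT NULL
);
```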
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8398 | https://github.com/apache/dolphinscheduler/pull/8399 | ca086c037e2cf53bceea64c4cfa600b04e8667e6 | f18f28cc4a69e7e0d6bf76724ea721af07f87304 | "2022-02-16T06:31:22Z" | java | "2022-02-16T07:10:19Z" | dolphinscheduler-dao/src/main/resources/sql/upgrade/2.1.0_schema/postgresql/dolphinscheduler_ddl.sql | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
delimiter d//
CREATE OR REPLACE FUNCTION public.dolphin_update_metadata(
)
RETURNS character varying
LANGUAGE 'plpgsql'
COST 100
VOLATILE PARALLEL UNSAFE
AS $BODY$
DECLARE
v_schema varchar;
BEGIN
---get schema name
v_schema =current_schema();
EXECUTE 'DROP INDEX IF EXISTS "process_task_relation_idx_project_code_process_definition_code"';
EXECUTE 'CREATE INDEX IF NOT EXISTS process_task_relation_idx_project_code_process_definition_code ON ' || quote_ident(v_schema) ||'.t_ds_process_task_relation USING Btree("project_code","process_definition_code")';
EXECUTE 'DROP INDEX IF EXISTS "process_task_relation_log_idx_project_code_process_definition_code"';
EXECUTE 'CREATE INDEX IF NOT EXISTS process_task_relation_log_idx_project_code_process_definition_code ON ' || quote_ident(v_schema) ||'.t_ds_process_task_relation_log USING Btree("project_code","process_definition_code")';
EXECUTE 'DROP INDEX IF EXISTS "idx_task_definition_log_code_version"';
EXECUTE 'CREATE INDEX IF NOT EXISTS idx_task_definition_log_code_version ON ' || quote_ident(v_schema) ||'.t_ds_task_definition_log USING Btree("code","version")';
EXECUTE 'CREATE TABLE IF NOT EXISTS' || quote_ident(v_schema) ||'."t_ds_dq_comparison_type" (
id serial NOT NULL,
"type" varchar NOT NULL,
execute_sql varchar NULL,
output_table varchar NULL,
"name" varchar NULL,
create_time timestamp NULL,
update_time timestamp NULL,
is_inner_source bool NULL,
CONSTRAINT t_ds_dq_comparison_type_pk PRIMARY KEY (id)
)';
EXECUTE 'CREATE TABLE IF NOT EXISTS' || quote_ident(v_schema) ||'."t_ds_dq_execute_result" (
id serial NOT NULL,
process_definition_id int4 NULL,
process_instance_id int4 NULL,
task_instance_id int4 NULL,
rule_type int4 NULL,
rule_name varchar(255) DEFAULT NULL,
statistics_value float8 NULL,
comparison_value float8 NULL,
check_type int4 NULL,
threshold float8 NULL,
"operator" int4 NULL,
failure_strategy int4 NULL,
state int4 NULL,
user_id int4 NULL,
create_time timestamp NULL,
update_time timestamp NULL,
comparison_type int4 NULL,
error_output_path text NULL,
CONSTRAINT t_ds_dq_execute_result_pk PRIMARY KEY (id)
)';
EXECUTE 'CREATE TABLE IF NOT EXISTS' || quote_ident(v_schema) ||'."t_ds_dq_rule" (
id serial NOT NULL,
"name" varchar(100) DEFAULT NULL,
"type" int4 NULL,
user_id int4 NULL,
create_time timestamp NULL,
update_time timestamp NULL,
CONSTRAINT t_ds_dq_rule_pk PRIMARY KEY (id)
)';
EXECUTE 'CREATE TABLE IF NOT EXISTS' || quote_ident(v_schema) ||'."t_ds_dq_rule_execute_sql" (
id serial NOT NULL,
"index" int4 NULL,
"sql" text NULL,
table_alias varchar(255) DEFAULT NULL,
"type" int4 NULL,
create_time timestamp NULL,
update_time timestamp NULL,
is_error_output_sql bool NULL,
CONSTRAINT t_ds_dq_rule_execute_sql_pk PRIMARY KEY (id)
)';
EXECUTE 'CREATE TABLE IF NOT EXISTS' || quote_ident(v_schema) ||'."t_ds_dq_rule_input_entry" (
id serial NOT NULL,
field varchar(255) DEFAULT NULL,
"type" varchar(255) DEFAULT NULL,
title varchar(255) DEFAULT NULL,
value varchar(255) DEFAULT NULL,
"options" text DEFAULT NULL,
placeholder varchar(255) DEFAULT NULL,
option_source_type int4 NULL,
value_type int4 NULL,
input_type int4 NULL,
is_show int2 NULL DEFAULT "1"::smallint,
can_edit int2 NULL DEFAULT "1"::smallint,
is_emit int2 NULL DEFAULT "0"::smallint,
is_validate int2 NULL DEFAULT "0"::smallint,
create_time timestamp NULL,
update_time timestamp NULL,
CONSTRAINT t_ds_dq_rule_input_entry_pk PRIMARY KEY (id)
)';
EXECUTE 'CREATE TABLE IF NOT EXISTS' || quote_ident(v_schema) ||'."t_ds_dq_task_statistics_value" (
id serial NOT NULL,
process_definition_id int4 NOT NULL,
task_instance_id int4 NULL,
rule_id int4 NOT NULL,
unique_code varchar NOT NULL,
statistics_name varchar NULL,
statistics_value float8 NULL,
data_time timestamp(0) NULL,
create_time timestamp(0) NULL,
update_time timestamp(0) NULL,
CONSTRAINT t_ds_dq_task_statistics_value_pk PRIMARY KEY (id)
)';
EXECUTE 'CREATE TABLE IF NOT EXISTS' || quote_ident(v_schema) ||'."t_ds_relation_rule_execute_sql" (
id serial NOT NULL,
rule_id int4 NULL,
execute_sql_id int4 NULL,
create_time timestamp NULL,
update_time timestamp NULL,
CONSTRAINT t_ds_relation_rule_execute_sql_pk PRIMARY KEY (id)
)';
EXECUTE 'CREATE TABLE IF NOT EXISTS' || quote_ident(v_schema) ||'."t_ds_relation_rule_input_entry" (
id serial NOT NULL,
rule_id int4 NULL,
rule_input_entry_id int4 NULL,
values_map text NULL,
"index" int4 NULL,
create_time timestamp NULL,
update_time timestamp NULL,
CONSTRAINT t_ds_relation_rule_input_entry_pk PRIMARY KEY (id)
)';
EXECUTE 'DROP INDEX IF EXISTS "idx_task_definition_log_project_code"';
EXECUTE 'CREATE INDEX IF NOT EXISTS idx_task_definition_log_project_code ON ' || quote_ident(v_schema) ||'.t_ds_task_definition_log USING Btree("project_code")';
EXECUTE 'DROP INDEX IF EXISTS "idx_task_instance_code_version"';
EXECUTE 'CREATE INDEX IF NOT EXISTS idx_task_instance_code_version ON' || quote_ident(v_schema) ||'.t_ds_task_instance USING Btree("task_code","task_definition_version")';
EXECUTE 'CREATE TABLE IF NOT EXISTS '|| quote_ident(v_schema) ||'."t_ds_k8s" (
id serial NOT NULL,
k8s_name VARCHAR(100) DEFAULT NULL ,
k8s_config text ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
)';
EXECUTE 'CREATE TABLE IF NOT EXISTS '|| quote_ident(v_schema) ||'."t_ds_k8s_namespace" (
id serial NOT NULL,
limits_memory int DEFAULT NULL ,
namespace varchar(100) DEFAULT NULL ,
online_job_num int DEFAULT ''0'' ,
owner varchar(100) DEFAULT NULL,
pod_replicas int(11) DEFAULT NULL,
pod_request_cpu NUMERIC(13,4) NULL,
pod_request_memory int(11) DEFAULT NULL,
tag varchar(100) DEFAULT NULL,
limits_cpu NUMERIC(13,4) NULL,
k8s varchar(100) DEFAULT NULL,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id) ,
CONSTRAINT k8s_namespace_unique UNIQUE (namespace,k8s)
)';
return 'Success!';
exception when others then
---Raise EXCEPTION '(%)',SQLERRM;
return SQLERRM;
END;
$BODY$;
select dolphin_update_metadata();
d//
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,406 | [Bug] [Project] Error in editing task save. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
I edited a task in the Task Definition page, and then saved.
![image](https://user-images.githubusercontent.com/97265214/154269846-9731afaf-8fd1-487d-8e11-103b6dad80b6.png)
I got this error.
![image](https://user-images.githubusercontent.com/97265214/154269730-abd7855f-3ac4-4d8f-9dca-55cf5d16bf72.png)
### What you expected to happen
Save successfully.
### How to reproduce
Open the task definition page, pick a record, edit it, and save it.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8406 | https://github.com/apache/dolphinscheduler/pull/8405 | bc5f8b3b3482443dbd5654fc0ac8aafabbd65def | 71fcea593782b7dc5d5ee4133e5bbadbefcf963d | "2022-02-16T13:07:06Z" | java | "2022-02-16T15:06:32Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskDefinitionServiceImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.service.TaskDefinitionService;
import org.apache.dolphinscheduler.api.utils.CheckUtils;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.AuthorizationType;
import org.apache.dolphinscheduler.common.enums.ConditionType;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils;
import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils.CodeGenerateException;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelationLog;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.TaskDefinition;
import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog;
import org.apache.dolphinscheduler.dao.entity.TaskMainInfo;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionLogMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper;
import org.apache.dolphinscheduler.service.permission.PermissionCheck;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.google.common.collect.Lists;
/**
* task definition service impl
*/
@Service
public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDefinitionService {
private static final Logger logger = LoggerFactory.getLogger(TaskDefinitionServiceImpl.class);
private static final String RELEASESTATE = "releaseState";
@Autowired
private ProjectMapper projectMapper;
@Autowired
private ProjectService projectService;
@Autowired
private TaskDefinitionMapper taskDefinitionMapper;
@Autowired
private TaskDefinitionLogMapper taskDefinitionLogMapper;
@Autowired
private ProcessTaskRelationMapper processTaskRelationMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
@Autowired
private ProcessService processService;
/**
* create task definition
*
* @param loginUser login user
* @param projectCode project code
* @param taskDefinitionJson task definition json
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> createTaskDefinition(User loginUser,
long projectCode,
String taskDefinitionJson) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
List<TaskDefinitionLog> taskDefinitionLogs = JSONUtils.toList(taskDefinitionJson, TaskDefinitionLog.class);
if (taskDefinitionLogs.isEmpty()) {
logger.error("taskDefinitionJson invalid: {}", taskDefinitionJson);
putMsg(result, Status.DATA_IS_NOT_VALID, taskDefinitionJson);
return result;
}
for (TaskDefinitionLog taskDefinitionLog : taskDefinitionLogs) {
if (!CheckUtils.checkTaskDefinitionParameters(taskDefinitionLog)) {
logger.error("task definition {} parameter invalid", taskDefinitionLog.getName());
putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinitionLog.getName());
return result;
}
}
int saveTaskResult = processService.saveTaskDefine(loginUser, projectCode, taskDefinitionLogs, Boolean.TRUE);
if (saveTaskResult == Constants.DEFINITION_FAILURE) {
putMsg(result, Status.CREATE_TASK_DEFINITION_ERROR);
throw new ServiceException(Status.CREATE_TASK_DEFINITION_ERROR);
}
Map<String, Object> resData = new HashMap<>();
resData.put("total", taskDefinitionLogs.size());
resData.put("code", StringUtils.join(taskDefinitionLogs.stream().map(TaskDefinition::getCode).collect(Collectors.toList()), ","));
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, resData);
return result;
}
/**
* create single task definition that binds the workflow
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode process definition code
* @param taskDefinitionJsonObj task definition json object
* @param upstreamCodes upstream task codes, sep comma
* @return create result code
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> createTaskBindsWorkFlow(User loginUser,
long projectCode,
long processDefinitionCode,
String taskDefinitionJsonObj,
String upstreamCodes) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode);
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionCode);
return result;
}
if (processDefinition.getReleaseState() == ReleaseState.ONLINE) {
putMsg(result, Status.PROCESS_DEFINE_STATE_ONLINE, processDefinitionCode);
return result;
}
TaskDefinitionLog taskDefinition = JSONUtils.parseObject(taskDefinitionJsonObj, TaskDefinitionLog.class);
if (taskDefinition == null) {
logger.error("taskDefinitionJsonObj is not valid json");
putMsg(result, Status.DATA_IS_NOT_VALID, taskDefinitionJsonObj);
return result;
}
if (!CheckUtils.checkTaskDefinitionParameters(taskDefinition)) {
logger.error("task definition {} parameter invalid", taskDefinition.getName());
putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinition.getName());
return result;
}
long taskCode = taskDefinition.getCode();
if (taskCode == 0) {
try {
taskCode = CodeGenerateUtils.getInstance().genCode();
taskDefinition.setCode(taskCode);
} catch (CodeGenerateException e) {
logger.error("Task code get error, ", e);
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS, taskDefinitionJsonObj);
return result;
}
}
if (StringUtils.isNotBlank(upstreamCodes)) {
Set<Long> upstreamTaskCodes = Arrays.stream(upstreamCodes.split(Constants.COMMA)).map(Long::parseLong).collect(Collectors.toSet());
List<TaskDefinition> upstreamTaskDefinitionList = taskDefinitionMapper.queryByCodeList(upstreamTaskCodes);
Set<Long> queryUpStreamTaskCodes = upstreamTaskDefinitionList.stream().map(TaskDefinition::getCode).collect(Collectors.toSet());
// upstreamTaskCodes - queryUpStreamTaskCodes
Set<Long> diffCode = upstreamTaskCodes.stream().filter(code -> !queryUpStreamTaskCodes.contains(code)).collect(Collectors.toSet());
if (!diffCode.isEmpty()) {
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, StringUtils.join(diffCode, Constants.COMMA));
return result;
}
List<ProcessTaskRelationLog> processTaskRelationLogList = Lists.newArrayList();
for (TaskDefinition upstreamTask : upstreamTaskDefinitionList) {
ProcessTaskRelationLog processTaskRelationLog = new ProcessTaskRelationLog();
processTaskRelationLog.setPreTaskCode(upstreamTask.getCode());
processTaskRelationLog.setPreTaskVersion(upstreamTask.getVersion());
processTaskRelationLog.setPostTaskCode(taskCode);
processTaskRelationLog.setPostTaskVersion(Constants.VERSION_FIRST);
processTaskRelationLog.setConditionType(ConditionType.NONE);
processTaskRelationLog.setConditionParams("{}");
processTaskRelationLogList.add(processTaskRelationLog);
}
List<ProcessTaskRelation> processTaskRelationList = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode);
if (!processTaskRelationList.isEmpty()) {
processTaskRelationLogList.addAll(processTaskRelationList.stream().map(ProcessTaskRelationLog::new).collect(Collectors.toList()));
}
int insertResult = processService.saveTaskRelation(loginUser, projectCode, processDefinition.getCode(), processDefinition.getVersion(),
processTaskRelationLogList, Lists.newArrayList(), Boolean.TRUE);
if (insertResult != Constants.EXIT_CODE_SUCCESS) {
putMsg(result, Status.CREATE_PROCESS_TASK_RELATION_ERROR);
throw new ServiceException(Status.CREATE_PROCESS_TASK_RELATION_ERROR);
}
}
int saveTaskResult = processService.saveTaskDefine(loginUser, projectCode, Lists.newArrayList(taskDefinition), Boolean.TRUE);
if (saveTaskResult == Constants.DEFINITION_FAILURE) {
putMsg(result, Status.CREATE_TASK_DEFINITION_ERROR);
throw new ServiceException(Status.CREATE_TASK_DEFINITION_ERROR);
}
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, taskDefinition);
return result;
}
/**
* query task definition
*
* @param loginUser login user
* @param projectCode project code
* @param taskName task name
*/
@Override
public Map<String, Object> queryTaskDefinitionByName(User loginUser, long projectCode, String taskName) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
TaskDefinition taskDefinition = taskDefinitionMapper.queryByName(project.getCode(), taskName);
if (taskDefinition == null) {
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, taskName);
} else {
result.put(Constants.DATA_LIST, taskDefinition);
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* delete task definition
* Only offline and no downstream dependency can be deleted
* @param loginUser login user
* @param projectCode project code
* @param taskCode task code
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> deleteTaskDefinitionByCode(User loginUser, long projectCode, long taskCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (taskCode == 0) {
putMsg(result, Status.DELETE_TASK_DEFINE_BY_CODE_ERROR);
return result;
}
TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode);
if (taskDefinition == null || projectCode != taskDefinition.getProjectCode()) {
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, taskCode);
return result;
}
if (processService.isTaskOnline(taskCode) && taskDefinition.getFlag() == Flag.YES) {
putMsg(result, Status.TASK_DEFINE_STATE_ONLINE, taskCode);
return result;
}
List<ProcessTaskRelation> processTaskRelationList = processTaskRelationMapper.queryDownstreamByTaskCode(taskCode);
if (!processTaskRelationList.isEmpty()) {
Set<Long> postTaskCodes = processTaskRelationList
.stream()
.map(ProcessTaskRelation::getPostTaskCode)
.collect(Collectors.toSet());
putMsg(result, Status.TASK_HAS_DOWNSTREAM, StringUtils.join(postTaskCodes, ","));
return result;
}
int delete = taskDefinitionMapper.deleteByCode(taskCode);
if (delete > 0) {
List<ProcessTaskRelation> taskRelationList = processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode);
if (!taskRelationList.isEmpty()) {
long processDefinitionCode = taskRelationList.get(0).getProcessDefinitionCode();
List<ProcessTaskRelation> processTaskRelations = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode);
List<ProcessTaskRelation> relationList = processTaskRelations.stream().filter(r -> r.getPostTaskCode() != taskCode).collect(Collectors.toList());
updateDag(loginUser, result, processDefinitionCode, relationList, Lists.newArrayList());
} else {
putMsg(result, Status.SUCCESS);
}
} else {
putMsg(result, Status.DELETE_TASK_DEFINE_BY_CODE_ERROR);
throw new ServiceException(Status.DELETE_TASK_DEFINE_BY_CODE_ERROR);
}
return result;
}
private void updateDag(User loginUser, Map<String, Object> result, long processDefinitionCode, List<ProcessTaskRelation> processTaskRelationList,
List<TaskDefinitionLog> taskDefinitionLogs) {
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode);
if (processDefinition == null) {
throw new ServiceException(Status.PROCESS_DEFINE_NOT_EXIST);
}
int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE);
if (insertVersion <= 0) {
throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR);
}
List<ProcessTaskRelationLog> relationLogs = processTaskRelationList.stream().map(ProcessTaskRelationLog::new).collect(Collectors.toList());
int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(), processDefinition.getCode(),
insertVersion, relationLogs, taskDefinitionLogs, Boolean.TRUE);
if (insertResult == Constants.EXIT_CODE_SUCCESS) {
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefinition);
} else {
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR);
}
}
/**
* update task definition
*
* @param loginUser login user
* @param projectCode project code
* @param taskCode task code
* @param taskDefinitionJsonObj task definition json object
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> updateTaskDefinition(User loginUser, long projectCode, long taskCode, String taskDefinitionJsonObj) {
Map<String, Object> result = new HashMap<>();
TaskDefinitionLog taskDefinitionToUpdate = updateTask(loginUser, projectCode, taskCode, taskDefinitionJsonObj, result);
if (taskDefinitionToUpdate == null) {
return result;
}
List<ProcessTaskRelation> taskRelationList = processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode);
if (!taskRelationList.isEmpty()) {
long processDefinitionCode = taskRelationList.get(0).getProcessDefinitionCode();
List<ProcessTaskRelation> processTaskRelations = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode);
updateDag(loginUser, result, processDefinitionCode, processTaskRelations, Lists.newArrayList(taskDefinitionToUpdate));
}
result.put(Constants.DATA_LIST, taskCode);
putMsg(result, Status.SUCCESS);
return result;
}
private TaskDefinitionLog updateTask(User loginUser, long projectCode, long taskCode, String taskDefinitionJsonObj, Map<String, Object> result) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
result.putAll(projectService.checkProjectAndAuth(loginUser, project, projectCode));
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return null;
}
TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode);
if (taskDefinition == null) {
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, taskCode);
return null;
}
if (processService.isTaskOnline(taskCode) && taskDefinition.getFlag() == Flag.YES) {
putMsg(result, Status.NOT_SUPPORT_UPDATE_TASK_DEFINITION);
return null;
}
TaskDefinitionLog taskDefinitionToUpdate = JSONUtils.parseObject(taskDefinitionJsonObj, TaskDefinitionLog.class);
if (taskDefinition.equals(taskDefinitionToUpdate)) {
return null;
}
if (taskDefinitionToUpdate == null) {
logger.error("taskDefinitionJson is not valid json");
putMsg(result, Status.DATA_IS_NOT_VALID, taskDefinitionJsonObj);
return null;
}
if (!CheckUtils.checkTaskDefinitionParameters(taskDefinitionToUpdate)) {
logger.error("task definition {} parameter invalid", taskDefinitionToUpdate.getName());
putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinitionToUpdate.getName());
return null;
}
Integer version = taskDefinitionLogMapper.queryMaxVersionForDefinition(taskCode);
if (version == null || version == 0) {
putMsg(result, Status.DATA_IS_NOT_VALID, taskCode);
return null;
}
Date now = new Date();
taskDefinitionToUpdate.setCode(taskCode);
taskDefinitionToUpdate.setId(taskDefinition.getId());
taskDefinitionToUpdate.setProjectCode(projectCode);
taskDefinitionToUpdate.setUserId(taskDefinition.getUserId());
taskDefinitionToUpdate.setVersion(++version);
taskDefinitionToUpdate.setTaskType(taskDefinitionToUpdate.getTaskType().toUpperCase());
taskDefinitionToUpdate.setResourceIds(processService.getResourceIds(taskDefinitionToUpdate));
taskDefinitionToUpdate.setUpdateTime(now);
int update = taskDefinitionMapper.updateById(taskDefinitionToUpdate);
taskDefinitionToUpdate.setOperator(loginUser.getId());
taskDefinitionToUpdate.setOperateTime(now);
taskDefinitionToUpdate.setCreateTime(now);
int insert = taskDefinitionLogMapper.insert(taskDefinitionToUpdate);
if ((update & insert) != 1) {
putMsg(result, Status.UPDATE_TASK_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR);
}
return taskDefinitionToUpdate;
}
/**
* update task definition and upstream
*
* @param loginUser login user
* @param projectCode project code
* @param taskCode task definition code
* @param taskDefinitionJsonObj task definition json object
* @param upstreamCodes upstream task codes, sep comma
* @return update result code
*/
@Override
public Map<String, Object> updateTaskWithUpstream(User loginUser, long projectCode, long taskCode, String taskDefinitionJsonObj, String upstreamCodes) {
Map<String, Object> result = new HashMap<>();
TaskDefinitionLog taskDefinitionToUpdate = updateTask(loginUser, projectCode, taskCode, taskDefinitionJsonObj, result);
if (result.get(Constants.STATUS) != Status.SUCCESS && taskDefinitionToUpdate == null) {
return result;
}
List<ProcessTaskRelation> upstreamTaskRelations = processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode);
Set<Long> upstreamCodeSet = upstreamTaskRelations.stream().map(ProcessTaskRelation::getPreTaskCode).collect(Collectors.toSet());
Set<Long> upstreamTaskCodes = Arrays.stream(upstreamCodes.split(Constants.COMMA)).map(Long::parseLong).collect(Collectors.toSet());
if (CollectionUtils.isEqualCollection(upstreamCodeSet, upstreamTaskCodes) && taskDefinitionToUpdate == null) {
putMsg(result, Status.SUCCESS);
return result;
} else {
if (taskDefinitionToUpdate == null) {
taskDefinitionToUpdate = JSONUtils.parseObject(taskDefinitionJsonObj, TaskDefinitionLog.class);
}
}
Map<Long, TaskDefinition> queryUpStreamTaskCodeMap;
if (!upstreamTaskCodes.isEmpty()) {
List<TaskDefinition> upstreamTaskDefinitionList = taskDefinitionMapper.queryByCodeList(upstreamTaskCodes);
queryUpStreamTaskCodeMap = upstreamTaskDefinitionList.stream().collect(Collectors.toMap(TaskDefinition::getCode, taskDefinition -> taskDefinition));
// upstreamTaskCodes - queryUpStreamTaskCodeMap.keySet
upstreamTaskCodes.removeAll(queryUpStreamTaskCodeMap.keySet());
if (!upstreamTaskCodes.isEmpty()) {
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, StringUtils.join(upstreamTaskCodes, Constants.COMMA));
return result;
}
} else {
queryUpStreamTaskCodeMap = new HashMap<>();
}
if (!upstreamTaskRelations.isEmpty()) {
ProcessTaskRelation taskRelation = upstreamTaskRelations.get(0);
List<ProcessTaskRelation> processTaskRelations = processTaskRelationMapper.queryByProcessCode(projectCode, taskRelation.getProcessDefinitionCode());
List<ProcessTaskRelation> processTaskRelationList = Lists.newArrayList(processTaskRelations);
List<ProcessTaskRelation> relationList = Lists.newArrayList();
for (ProcessTaskRelation processTaskRelation : processTaskRelationList) {
if (processTaskRelation.getPostTaskCode() == taskCode) {
if (queryUpStreamTaskCodeMap.containsKey(processTaskRelation.getPreTaskCode()) && processTaskRelation.getPreTaskCode() != 0L) {
queryUpStreamTaskCodeMap.remove(processTaskRelation.getPreTaskCode());
} else {
processTaskRelation.setPreTaskCode(0L);
processTaskRelation.setPreTaskVersion(0);
relationList.add(processTaskRelation);
}
}
}
processTaskRelationList.removeAll(relationList);
for (Map.Entry<Long, TaskDefinition> queryUpStreamTask : queryUpStreamTaskCodeMap.entrySet()) {
taskRelation.setPreTaskCode(queryUpStreamTask.getKey());
taskRelation.setPreTaskVersion(queryUpStreamTask.getValue().getVersion());
processTaskRelationList.add(taskRelation);
}
if (queryUpStreamTaskCodeMap.isEmpty() && !processTaskRelationList.isEmpty()) {
processTaskRelationList.add(processTaskRelationList.get(0));
}
updateDag(loginUser, result, taskRelation.getProcessDefinitionCode(), processTaskRelations, Lists.newArrayList(taskDefinitionToUpdate));
}
result.put(Constants.DATA_LIST, taskCode);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* switch task definition
*
* @param loginUser login user
* @param projectCode project code
* @param taskCode task code
* @param version the version user want to switch
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> switchVersion(User loginUser, long projectCode, long taskCode, int version) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (processService.isTaskOnline(taskCode)) {
putMsg(result, Status.PROCESS_DEFINE_STATE_ONLINE);
return result;
}
TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode);
if (taskDefinition == null || projectCode != taskDefinition.getProjectCode()) {
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, taskCode);
return result;
}
TaskDefinitionLog taskDefinitionUpdate = taskDefinitionLogMapper.queryByDefinitionCodeAndVersion(taskCode, version);
taskDefinitionUpdate.setUserId(loginUser.getId());
taskDefinitionUpdate.setUpdateTime(new Date());
taskDefinitionUpdate.setId(taskDefinition.getId());
int switchVersion = taskDefinitionMapper.updateById(taskDefinitionUpdate);
if (switchVersion > 0) {
List<ProcessTaskRelation> taskRelationList = processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode);
if (!taskRelationList.isEmpty()) {
long processDefinitionCode = taskRelationList.get(0).getProcessDefinitionCode();
List<ProcessTaskRelation> processTaskRelations = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode);
updateDag(loginUser, result, processDefinitionCode, processTaskRelations, Lists.newArrayList(taskDefinitionUpdate));
} else {
putMsg(result, Status.SUCCESS);
}
} else {
putMsg(result, Status.SWITCH_TASK_DEFINITION_VERSION_ERROR);
}
return result;
}
@Override
public Result queryTaskDefinitionVersions(User loginUser,
long projectCode,
long taskCode,
int pageNo,
int pageSize) {
Result result = new Result();
Project project = projectMapper.queryByCode(projectCode);
// check user access for project
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectCode);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
putMsg(result, resultStatus);
return result;
}
PageInfo<TaskDefinitionLog> pageInfo = new PageInfo<>(pageNo, pageSize);
Page<TaskDefinitionLog> page = new Page<>(pageNo, pageSize);
IPage<TaskDefinitionLog> taskDefinitionVersionsPaging = taskDefinitionLogMapper.queryTaskDefinitionVersionsPaging(page, taskCode, projectCode);
List<TaskDefinitionLog> taskDefinitionLogs = taskDefinitionVersionsPaging.getRecords();
pageInfo.setTotalList(taskDefinitionLogs);
pageInfo.setTotal((int) taskDefinitionVersionsPaging.getTotal());
result.setData(pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
@Override
public Map<String, Object> deleteByCodeAndVersion(User loginUser, long projectCode, long taskCode, int version) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode);
if (taskDefinition == null) {
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, taskCode);
} else {
if (taskDefinition.getVersion() == version) {
putMsg(result, Status.MAIN_TABLE_USING_VERSION);
return result;
}
int delete = taskDefinitionLogMapper.deleteByCodeAndVersion(taskCode, version);
if (delete > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.DELETE_TASK_DEFINITION_VERSION_ERROR);
}
}
return result;
}
@Override
public Map<String, Object> queryTaskDefinitionDetail(User loginUser, long projectCode, long taskCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode);
if (taskDefinition == null || projectCode != taskDefinition.getProjectCode()) {
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, taskCode);
} else {
result.put(Constants.DATA_LIST, taskDefinition);
putMsg(result, Status.SUCCESS);
}
return result;
}
@Override
public Result queryTaskDefinitionListPaging(User loginUser,
long projectCode,
String searchWorkflowName,
String searchTaskName,
TaskType taskType,
Integer pageNo,
Integer pageSize) {
Result result = new Result();
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectCode);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
putMsg(result, resultStatus);
return result;
}
Page<TaskMainInfo> page = new Page<>(pageNo, pageSize);
IPage<TaskMainInfo> taskMainInfoIPage = taskDefinitionMapper.queryDefineListPaging(page, projectCode, searchWorkflowName,
searchTaskName, taskType == null ? StringUtils.EMPTY : taskType.getDesc());
List<TaskMainInfo> records = taskMainInfoIPage.getRecords();
if (!records.isEmpty()) {
Map<Long, TaskMainInfo> taskMainInfoMap = new HashMap<>();
for (TaskMainInfo info : records) {
taskMainInfoMap.compute(info.getTaskCode(), (k, v) -> {
if (v == null) {
Map<Long, String> upstreamTaskMap = new HashMap<>();
if (info.getUpstreamTaskCode() != 0) {
upstreamTaskMap.put(info.getUpstreamTaskCode(), info.getUpstreamTaskName());
info.setUpstreamTaskCode(0L);
info.setUpstreamTaskName(StringUtils.EMPTY);
}
info.setUpstreamTaskMap(upstreamTaskMap);
v = info;
}
if (info.getUpstreamTaskCode() != 0) {
v.getUpstreamTaskMap().put(info.getUpstreamTaskCode(), info.getUpstreamTaskName());
}
return v;
});
}
taskMainInfoIPage.setRecords(Lists.newArrayList(taskMainInfoMap.values()));
taskMainInfoIPage.setTotal(taskMainInfoMap.values().size());
}
PageInfo<TaskMainInfo> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotal((int) taskMainInfoIPage.getTotal());
pageInfo.setTotalList(taskMainInfoIPage.getRecords());
result.setData(pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
@Override
public Map<String, Object> genTaskCodeList(Integer genNum) {
Map<String, Object> result = new HashMap<>();
if (genNum == null || genNum < 1 || genNum > 100) {
logger.error("the genNum must be great than 1 and less than 100");
putMsg(result, Status.DATA_IS_NOT_VALID, genNum);
return result;
}
List<Long> taskCodes = new ArrayList<>();
try {
for (int i = 0; i < genNum; i++) {
taskCodes.add(CodeGenerateUtils.getInstance().genCode());
}
} catch (CodeGenerateException e) {
logger.error("Task code get error, ", e);
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS, "Error generating task definition code");
}
putMsg(result, Status.SUCCESS);
// return processDefinitionCode
result.put(Constants.DATA_LIST, taskCodes);
return result;
}
/**
* release task definition
*
* @param loginUser login user
* @param projectCode project code
* @param code task definition code
* @param releaseState releaseState
* @return update result code
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> releaseTaskDefinition(User loginUser, long projectCode, long code, ReleaseState releaseState) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
Status resultStatus = (Status) result.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return result;
}
if (null == releaseState) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE);
return result;
}
TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(code);
if (taskDefinition == null || projectCode != taskDefinition.getProjectCode()) {
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, code);
return result;
}
TaskDefinitionLog taskDefinitionLog = taskDefinitionLogMapper.queryByDefinitionCodeAndVersion(code, taskDefinition.getVersion());
if (taskDefinitionLog == null) {
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, code);
return result;
}
switch (releaseState) {
case OFFLINE:
taskDefinition.setFlag(Flag.NO);
taskDefinitionLog.setFlag(Flag.NO);
break;
case ONLINE:
String resourceIds = taskDefinition.getResourceIds();
if (StringUtils.isNotBlank(resourceIds)) {
Integer[] resourceIdArray = Arrays.stream(resourceIds.split(",")).map(Integer::parseInt).toArray(Integer[]::new);
PermissionCheck<Integer> permissionCheck = new PermissionCheck(AuthorizationType.RESOURCE_FILE_ID,processService,resourceIdArray,loginUser.getId(),logger);
try {
permissionCheck.checkPermission();
} catch (Exception e) {
logger.error(e.getMessage(),e);
putMsg(result, Status.RESOURCE_NOT_EXIST_OR_NO_PERMISSION);
return result;
}
}
taskDefinition.setFlag(Flag.YES);
taskDefinitionLog.setFlag(Flag.YES);
break;
default:
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE);
return result;
}
int update = taskDefinitionMapper.updateById(taskDefinition);
int updateLog = taskDefinitionLogMapper.updateById(taskDefinitionLog);
if ((update == 0 && updateLog == 1) || (update == 1 && updateLog == 0)) {
putMsg(result, Status.UPDATE_TASK_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR);
}
putMsg(result, Status.SUCCESS);
return result;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,366 | sudo.enable=false Is invalid | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
Hello, I don't want to use `sudo -u` when scheduling tasks,
so I changed this configuration in `conf/common.properties`:
```shell
# use sudo or not, if set true, executing user is tenant user and deploy user needs sudo permissions; if set false, executing user is the deploy user and doesn't need sudo permissions
sudo.enable=false
```
but it has no effect;
the shell task still uses `sudo -u`.
log info:
…..
[INFO] 2022-02-10 08:34:18.661 TaskLogLogger-class org.apache.dolphinscheduler.plugin.task.shell.ShellTask:[285] - task run command: sudo -u hadoop sh /home/hadoop/dolphinscheduler/temp/exec/process/4462120816128/4465215383552_1/13/15/13_15.command
[INFO] 2022-02-10 08:34:18.664 TaskLogLogger-class org.apache.dolphinscheduler.plugin.task.shell.ShellTask:[176] - process start, process id is: 92386
[INFO] 2022-02-10 08:34:18.671 TaskLogLogger-class org.apache.dolphinscheduler.plugin.task.shell.ShellTask:[200] - process has exited, execute path:/home/hadoop/dolphinscheduler/temp/exec/process/4462120816128/4465215383552_1/13/15, processId:92386 ,exitStatusCode:1 ,processWaitForStatus:true ,processExitValue:1
[INFO] 2022-02-10 08:34:19.664 TaskLogLogger-class org.apache.dolphinscheduler.plugin.task.shell.ShellTask:[66] - -> welcome to use bigdata scheduling system...
hadoop is not in the sudoers file. This incident will be reported.
…...
### What you expected to happen
I expected it would not use 「sudo -u」 when scheduling.
### How to reproduce
use this config:
```shell
# use sudo or not, if set true, executing user is tenant user and deploy user needs sudo permissions; if set false, executing user is the deploy user and doesn't need sudo permissions
sudo.enable=false
```
### Anything else
**Source code in question:**
The use of 「sudo -u」 appears to be hard-coded in
org.apache.dolphinscheduler.plugin.task.api.AbstractCommandExecutor#buildProcess:
```java
private void buildProcess(String commandFile) throws IOException {
// setting up user to run commands
List<String> command = new LinkedList<>();
//init process builder
ProcessBuilder processBuilder = new ProcessBuilder();
// setting up a working directory
processBuilder.directory(new File(taskRequest.getExecutePath()));
// merge error information to standard output stream
processBuilder.redirectErrorStream(true);
// setting up user to run commands
command.add("sudo");
command.add("-u");
command.add(taskRequest.getTenantCode());
command.add(commandInterpreter());
command.addAll(Collections.emptyList());
command.add(commandFile);
// setting commands
processBuilder.command(command);
process = processBuilder.start();
printCommand(command);
}
```
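For illustration only, here is a minimal sketch of how the prefix could be made conditional on that flag. `isSudoEnable()` is a hypothetical helper, not an existing API; a real fix would need to read `sudo.enable` from common.properties:
```java
// Hedged sketch: only switch to the tenant user when sudo is enabled.
private void buildProcess(String commandFile) throws IOException {
    List<String> command = new LinkedList<>();
    ProcessBuilder processBuilder = new ProcessBuilder();
    processBuilder.directory(new File(taskRequest.getExecutePath()));
    processBuilder.redirectErrorStream(true);
    if (isSudoEnable()) { // hypothetical helper backed by the sudo.enable property
        command.add("sudo");
        command.add("-u");
        command.add(taskRequest.getTenantCode());
    }
    command.add(commandInterpreter());
    command.add(commandFile);
    processBuilder.command(command);
    process = processBuilder.start();
    printCommand(command);
}
```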
### Version
2.0.2
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8366 | https://github.com/apache/dolphinscheduler/pull/8388 | d9129297abac14aba74e87009477fc3502ac116b | 806333a11c03e5757d0c8dc56b5d1ca62fbacd1d | "2022-02-14T00:24:11Z" | java | "2022-02-17T07:23:21Z" | dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractCommandExecutor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.api;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.EXIT_CODE_FAILURE;
import static org.apache.dolphinscheduler.spi.task.TaskConstants.EXIT_CODE_KILL;
import org.apache.dolphinscheduler.plugin.task.util.OSUtils;
import org.apache.dolphinscheduler.spi.task.TaskConstants;
import org.apache.dolphinscheduler.spi.task.TaskExecutionContextCacheManager;
import org.apache.dolphinscheduler.spi.task.request.TaskRequest;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.reflect.Field;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.slf4j.Logger;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
/**
* abstract command executor
*/
public abstract class AbstractCommandExecutor {
/**
* rules for extracting application ID
*/
protected static final Pattern APPLICATION_REGEX = Pattern.compile(TaskConstants.APPLICATION_REGEX);
protected StringBuilder varPool = new StringBuilder();
/**
* process
*/
private Process process;
/**
* log handler
*/
protected Consumer<LinkedBlockingQueue<String>> logHandler;
/**
* logger
*/
protected Logger logger;
/**
* log list
*/
protected LinkedBlockingQueue<String> logBuffer;
protected boolean logOutputIsSuccess = false;
/*
* SHELL result string
*/
protected String taskResultString;
/**
* taskRequest
*/
protected TaskRequest taskRequest;
public AbstractCommandExecutor(Consumer<LinkedBlockingQueue<String>> logHandler,
TaskRequest taskRequest,
Logger logger) {
this.logHandler = logHandler;
this.taskRequest = taskRequest;
this.logger = logger;
this.logBuffer = new LinkedBlockingQueue<>();
}
public AbstractCommandExecutor(LinkedBlockingQueue<String> logBuffer) {
this.logBuffer = logBuffer;
}
/**
* build process
*
* @param commandFile command file
* @throws IOException IO Exception
*/
private void buildProcess(String commandFile) throws IOException {
// setting up user to run commands
List<String> command = new LinkedList<>();
//init process builder
ProcessBuilder processBuilder = new ProcessBuilder();
// setting up a working directory
processBuilder.directory(new File(taskRequest.getExecutePath()));
// merge error information to standard output stream
processBuilder.redirectErrorStream(true);
// setting up user to run commands
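// NOTE: this sudo prefix is added unconditionally; the sudo.enable flag
// from common.properties is never consulted here (the behavior reported in issue #8366)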
command.add("sudo");
command.add("-u");
command.add(taskRequest.getTenantCode());
command.add(commandInterpreter());
command.addAll(Collections.emptyList());
command.add(commandFile);
// setting commands
processBuilder.command(command);
process = processBuilder.start();
printCommand(command);
}
public TaskResponse run(String execCommand) throws IOException, InterruptedException {
TaskResponse result = new TaskResponse();
int taskInstanceId = taskRequest.getTaskInstanceId();
if (null == TaskExecutionContextCacheManager.getByTaskInstanceId(taskInstanceId)) {
result.setExitStatusCode(EXIT_CODE_KILL);
return result;
}
if (StringUtils.isEmpty(execCommand)) {
TaskExecutionContextCacheManager.removeByTaskInstanceId(taskInstanceId);
return result;
}
String commandFilePath = buildCommandFilePath();
// create command file if not exists
createCommandFileIfNotExists(execCommand, commandFilePath);
//build process
buildProcess(commandFilePath);
// parse process output
parseProcessOutput(process);
int processId = getProcessId(process);
result.setProcessId(processId);
// cache processId
taskRequest.setProcessId(processId);
boolean updateTaskExecutionContextStatus = TaskExecutionContextCacheManager.updateTaskExecutionContext(taskRequest);
if (Boolean.FALSE.equals(updateTaskExecutionContextStatus)) {
ProcessUtils.kill(taskRequest);
result.setExitStatusCode(EXIT_CODE_KILL);
return result;
}
// print process id
logger.info("process start, process id is: {}", processId);
// if timeout occurs, exit directly
long remainTime = getRemainTime();
// waiting for the run to finish
boolean status = process.waitFor(remainTime, TimeUnit.SECONDS);
// if SHELL task exit
if (status) {
// set appIds
List<String> appIds = getAppIds(taskRequest.getLogPath());
result.setAppIds(String.join(TaskConstants.COMMA, appIds));
// SHELL task state
result.setExitStatusCode(process.exitValue());
} else {
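// NOTE: when waitFor times out the process is still alive, so the
// process.exitValue() call in this log statement can itself throw
// IllegalThreadStateException; guarding that call would be safer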
logger.error("process has failure , exitStatusCode:{}, processExitValue:{}, ready to kill ...",
result.getExitStatusCode(), process.exitValue());
ProcessUtils.kill(taskRequest);
result.setExitStatusCode(EXIT_CODE_FAILURE);
}
logger.info("process has exited, execute path:{}, processId:{} ,exitStatusCode:{} ,processWaitForStatus:{} ,processExitValue:{}",
taskRequest.getExecutePath(), processId, result.getExitStatusCode(), status, process.exitValue());
return result;
}
public String getVarPool() {
return varPool.toString();
}
/**
* cancel application
*
* @throws Exception exception
*/
public void cancelApplication() throws Exception {
if (process == null) {
return;
}
// clear log
clear();
int processId = getProcessId(process);
logger.info("cancel process: {}", processId);
// kill , waiting for completion
boolean killed = softKill(processId);
if (!killed) {
// hard kill
hardKill(processId);
// destroy
process.destroy();
process = null;
}
}
/**
* soft kill
*
* @param processId process id
* @return process is alive
*/
private boolean softKill(int processId) {
if (processId != 0 && process.isAlive()) {
try {
// sudo -u user command to run command
String cmd = String.format("kill %d", processId);
cmd = OSUtils.getSudoCmd(taskRequest.getTenantCode(), cmd);
logger.info("soft kill task:{}, process id:{}, cmd:{}", taskRequest.getTaskAppId(), processId, cmd);
Runtime.getRuntime().exec(cmd);
} catch (IOException e) {
logger.info("kill attempt failed", e);
}
}
return process.isAlive();
}
/**
* hard kill
*
* @param processId process id
*/
private void hardKill(int processId) {
if (processId != 0 && process.isAlive()) {
try {
String cmd = String.format("kill -9 %d", processId);
cmd = OSUtils.getSudoCmd(taskRequest.getTenantCode(), cmd);
logger.info("hard kill task:{}, process id:{}, cmd:{}", taskRequest.getTaskAppId(), processId, cmd);
Runtime.getRuntime().exec(cmd);
} catch (IOException e) {
logger.error("kill attempt failed ", e);
}
}
}
private void printCommand(List<String> commands) {
logger.info("task run command: {}", String.join(" ", commands));
}
/**
* clear
*/
private void clear() {
LinkedBlockingQueue<String> markerLog = new LinkedBlockingQueue<>(1);
markerLog.add(ch.qos.logback.classic.ClassicConstants.FINALIZE_SESSION_MARKER.toString());
if (!logBuffer.isEmpty()) {
// log handle
logHandler.accept(logBuffer);
logBuffer.clear();
}
logHandler.accept(markerLog);
}
/**
* get the standard output of the process
*
* @param process process
*/
private void parseProcessOutput(Process process) {
String threadLoggerInfoName = taskRequest.getTaskLogName();
ExecutorService getOutputLogService = newDaemonSingleThreadExecutor(threadLoggerInfoName);
getOutputLogService.submit(() -> {
try (BufferedReader inReader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
String line;
while ((line = inReader.readLine()) != null) {
if (line.startsWith("${setValue(")) {
varPool.append(line, "${setValue(".length(), line.length() - 2);
varPool.append("$VarPool$");
} else {
logBuffer.add(line);
taskResultString = line;
}
}
logOutputIsSuccess = true;
} catch (Exception e) {
logger.error(e.getMessage(), e);
logOutputIsSuccess = true;
}
});
getOutputLogService.shutdown();
ExecutorService parseProcessOutputExecutorService = newDaemonSingleThreadExecutor(threadLoggerInfoName);
parseProcessOutputExecutorService.submit(() -> {
try {
long lastFlushTime = System.currentTimeMillis();
while (logBuffer.size() > 0 || !logOutputIsSuccess) {
if (logBuffer.size() > 0) {
lastFlushTime = flush(lastFlushTime);
} else {
Thread.sleep(TaskConstants.DEFAULT_LOG_FLUSH_INTERVAL);
}
}
} catch (Exception e) {
Thread.currentThread().interrupt();
logger.error(e.getMessage(), e);
} finally {
clear();
}
});
parseProcessOutputExecutorService.shutdown();
}
/**
* get app links
*
* @param logPath log path
* @return app id list
*/
private List<String> getAppIds(String logPath) {
List<String> logs = convertFile2List(logPath);
List<String> appIds = new ArrayList<>();
/*
* analyze the log to get the submitted yarn application id
*/
for (String log : logs) {
String appId = findAppId(log);
if (StringUtils.isNotEmpty(appId) && !appIds.contains(appId)) {
logger.info("find app id: {}", appId);
appIds.add(appId);
}
}
return appIds;
}
/**
* convert file to list
*
* @param filename file name
* @return line list
*/
private List<String> convertFile2List(String filename) {
List<String> lineList = new ArrayList<>(100);
File file = new File(filename);
if (!file.exists()) {
return lineList;
}
try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(filename), StandardCharsets.UTF_8))) {
String line;
while ((line = br.readLine()) != null) {
lineList.add(line);
}
} catch (Exception e) {
logger.error(String.format("read file: %s failed : ", filename), e);
}
return lineList;
}
/**
* find app id
*
* @param line line
* @return appid
*/
private String findAppId(String line) {
Matcher matcher = APPLICATION_REGEX.matcher(line);
if (matcher.find()) {
return matcher.group();
}
return null;
}
/**
* get remain time(s)
*
* @return remain time
*/
private long getRemainTime() {
long usedTime = (System.currentTimeMillis() - taskRequest.getStartTime().getTime()) / 1000;
long remainTime = taskRequest.getTaskTimeout() - usedTime;
if (remainTime < 0) {
throw new RuntimeException("task execution time out");
}
return remainTime;
}
/**
* get process id
*
* @param process process
* @return process id
*/
private int getProcessId(Process process) {
int processId = 0;
try {
Field f = process.getClass().getDeclaredField(TaskConstants.PID);
f.setAccessible(true);
processId = f.getInt(process);
} catch (Throwable e) {
logger.error(e.getMessage(), e);
}
return processId;
}
/**
* when the log buffer size or the flush interval reaches its threshold, flush
*
* @param lastFlushTime last flush time
* @return last flush time
*/
private long flush(long lastFlushTime) {
long now = System.currentTimeMillis();
/*
* when the log buffer size or the flush interval reaches its threshold, flush
*/
if (logBuffer.size() >= TaskConstants.DEFAULT_LOG_ROWS_NUM || now - lastFlushTime > TaskConstants.DEFAULT_LOG_FLUSH_INTERVAL) {
lastFlushTime = now;
logHandler.accept(logBuffer);
logBuffer.clear();
}
return lastFlushTime;
}
protected abstract String buildCommandFilePath();
protected abstract void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException;
ExecutorService newDaemonSingleThreadExecutor(String threadName) {
ThreadFactory threadFactory = new ThreadFactoryBuilder()
.setDaemon(true)
.setNameFormat(threadName)
.build();
return Executors.newSingleThreadExecutor(threadFactory);
}
protected abstract String commandInterpreter();
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,366 | sudo.enable=false Is invalid | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
Hello, I don't want to use 「sudo -u」 when scheduling tasks,
so I changed this configuration in 「conf/common.properties」:
```shell
# use sudo or not, if set true, executing user is tenant user and deploy user needs sudo permissions; if set false, executing user is the deploy user and doesn't need sudo permissions
sudo.enable=false
```
but it has no effect:
the shell task still uses 「sudo -u」.
log info:
…..
[INFO] 2022-02-10 08:34:18.661 TaskLogLogger-class org.apache.dolphinscheduler.plugin.task.shell.ShellTask:[285] - task run command: sudo -u hadoop sh /home/hadoop/dolphinscheduler/temp/exec/process/4462120816128/4465215383552_1/13/15/13_15.command
[INFO] 2022-02-10 08:34:18.664 TaskLogLogger-class org.apache.dolphinscheduler.plugin.task.shell.ShellTask:[176] - process start, process id is: 92386
[INFO] 2022-02-10 08:34:18.671 TaskLogLogger-class org.apache.dolphinscheduler.plugin.task.shell.ShellTask:[200] - process has exited, execute path:/home/hadoop/dolphinscheduler/temp/exec/process/4462120816128/4465215383552_1/13/15, processId:92386 ,exitStatusCode:1 ,processWaitForStatus:true ,processExitValue:1
[INFO] 2022-02-10 08:34:19.664 TaskLogLogger-class org.apache.dolphinscheduler.plugin.task.shell.ShellTask:[66] - -> welcome to use bigdata scheduling system...
hadoop is not in the sudoers file. This incident will be reported.
…...
### What you expected to happen
I expected it would not use 「sudo -u」 when scheduling.
### How to reproduce
use this config:
```shell
# use sudo or not, if set true, executing user is tenant user and deploy user needs sudo permissions; if set false, executing user is the deploy user and doesn't need sudo permissions
sudo.enable=false
```
### Anything else
**Source code in question:**
The use of 「sudo -u」 appears to be hard-coded in
org.apache.dolphinscheduler.plugin.task.api.AbstractCommandExecutor#buildProcess:
```java
private void buildProcess(String commandFile) throws IOException {
// setting up user to run commands
List<String> command = new LinkedList<>();
//init process builder
ProcessBuilder processBuilder = new ProcessBuilder();
// setting up a working directory
processBuilder.directory(new File(taskRequest.getExecutePath()));
// merge error information to standard output stream
processBuilder.redirectErrorStream(true);
// setting up user to run commands
command.add("sudo");
command.add("-u");
command.add(taskRequest.getTenantCode());
command.add(commandInterpreter());
command.addAll(Collections.emptyList());
command.add(commandFile);
// setting commands
processBuilder.command(command);
process = processBuilder.start();
printCommand(command);
}
```
### Version
2.0.2
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8366 | https://github.com/apache/dolphinscheduler/pull/8388 | d9129297abac14aba74e87009477fc3502ac116b | 806333a11c03e5757d0c8dc56b5d1ca62fbacd1d | "2022-02-14T00:24:11Z" | java | "2022-02-17T07:23:21Z" | dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/util/OSUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.util;
import org.apache.dolphinscheduler.plugin.task.api.ShellExecutor;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import java.io.IOException;
import java.util.StringTokenizer;
public class OSUtils {
private OSUtils() {
throw new IllegalStateException("Utility class");
}
/**
* get sudo command
*
* @param tenantCode tenantCode
* @param command command
* @return result of sudo execute command
*/
public static String getSudoCmd(String tenantCode, String command) {
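// note: only the tenantCode is checked here; the sudo.enable switch is not consulted by this helper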
return StringUtils.isEmpty(tenantCode) ? command : "sudo -u " + tenantCode + " " + command;
}
/**
* whether is macOS
*
* @return true if mac
*/
public static boolean isMacOS() {
return getOSName().startsWith("Mac");
}
/**
* whether is windows
*
* @return true if windows
*/
public static boolean isWindows() {
return getOSName().startsWith("Windows");
}
/**
* Execute the corresponding command of Linux or Windows
*
* @param command command
* @return result of execute command
* @throws IOException errors
*/
public static String exeCmd(String command) throws IOException {
StringTokenizer st = new StringTokenizer(command);
String[] cmdArray = new String[st.countTokens()];
for (int i = 0; st.hasMoreTokens(); i++) {
cmdArray[i] = st.nextToken();
}
return exeShell(cmdArray);
}
/**
* Execute the shell
*
* @param command command
* @return result of execute the shell
* @throws IOException errors
*/
public static String exeShell(String[] command) throws IOException {
return ShellExecutor.execCommand(command);
}
public static String getOSName() {
return System.getProperty("os.name");
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,378 | [Bug] [API] DeleteTaskRelation api report ConcurrentModificationException | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
<img width="1434" alt="lQLPDhsmXnHJphTNATDNCzSwRBcst_MOkoUCD4luqgD0AA_2868_304" src="https://user-images.githubusercontent.com/42576980/153859728-0abf23be-33f5-4274-aa85-2d14f785378e.png">
### What you expected to happen
It can operate normally
### How to reproduce
Unbind a task from its process definition via the delete-task-relation api.
### Anything else
_No response_
### Version
2.0.3
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
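For context, a self-contained sketch (not the project's code) of why removing from an `ArrayList` inside a for-each loop raises `ConcurrentModificationException`, and a `removeIf`-based alternative that avoids it:
```java
import java.util.ArrayList;
import java.util.List;

public class CmeDemo {
    public static void main(String[] args) {
        List<Long> codes = new ArrayList<>(List.of(1L, 2L, 3L, 4L));

        // Throws ConcurrentModificationException: the hidden iterator of the
        // for-each loop detects the structural modification on its next step.
        // for (Long code : codes) {
        //     if (code == 2L) {
        //         codes.remove(code);
        //     }
        // }

        // Safe: removeIf removes elements through the list's own iterator.
        codes.removeIf(code -> code == 2L);
        System.out.println(codes); // prints [1, 3, 4]
    }
}
```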
| https://github.com/apache/dolphinscheduler/issues/8378 | https://github.com/apache/dolphinscheduler/pull/8379 | 806333a11c03e5757d0c8dc56b5d1ca62fbacd1d | f9c090e048250a6ec57719be9bc0f385d5b97f97 | "2022-02-14T11:58:33Z" | java | "2022-02-17T07:38:13Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessTaskRelationServiceImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.service.ProcessTaskRelationService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ConditionType;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation;
import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelationLog;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.TaskDefinition;
import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionLogMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import org.apache.commons.collections.CollectionUtils;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.google.common.collect.Lists;
/**
* process task relation service impl
*/
@Service
public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements ProcessTaskRelationService {
@Autowired
private ProjectMapper projectMapper;
@Autowired
private ProjectService projectService;
@Autowired
private ProcessTaskRelationMapper processTaskRelationMapper;
@Autowired
private TaskDefinitionLogMapper taskDefinitionLogMapper;
@Autowired
private TaskDefinitionMapper taskDefinitionMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
@Autowired
private ProcessService processService;
/**
* create process task relation
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode processDefinitionCode
* @param preTaskCode preTaskCode
* @param postTaskCode postTaskCode
* @return create result code
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> createProcessTaskRelation(User loginUser, long projectCode, long processDefinitionCode, long preTaskCode, long postTaskCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionCode);
return result;
}
if (processDefinition.getProjectCode() != projectCode) {
putMsg(result, Status.PROJECT_PROCESS_NOT_MATCH);
return result;
}
updateProcessDefiniteVersion(loginUser, result, processDefinition);
List<ProcessTaskRelation> processTaskRelationList = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode);
List<ProcessTaskRelation> processTaskRelations = Lists.newArrayList(processTaskRelationList);
if (!processTaskRelations.isEmpty()) {
Map<Long, ProcessTaskRelation> preTaskCodeMap = processTaskRelations.stream().filter(r -> r.getPostTaskCode() == postTaskCode)
.collect(Collectors.toMap(ProcessTaskRelation::getPreTaskCode, processTaskRelation -> processTaskRelation));
if (!preTaskCodeMap.isEmpty()) {
if (preTaskCodeMap.containsKey(preTaskCode) || (!preTaskCodeMap.containsKey(0L) && preTaskCode == 0L)) {
putMsg(result, Status.PROCESS_TASK_RELATION_EXIST, processDefinitionCode);
return result;
}
if (preTaskCodeMap.containsKey(0L) && preTaskCode != 0L) {
// delete no upstream
processTaskRelations.remove(preTaskCodeMap.get(0L));
}
}
}
TaskDefinition postTaskDefinition = taskDefinitionMapper.queryByCode(postTaskCode);
ProcessTaskRelation processTaskRelation = setRelation(processDefinition, postTaskDefinition);
if (preTaskCode != 0L) {
TaskDefinition preTaskDefinition = taskDefinitionMapper.queryByCode(preTaskCode);
List<ProcessTaskRelation> upstreamTaskRelationList = processTaskRelations.stream().filter(r -> r.getPostTaskCode() == preTaskCode).collect(Collectors.toList());
// upstream is or not exist
if (upstreamTaskRelationList.isEmpty()) {
ProcessTaskRelation preProcessTaskRelation = setRelation(processDefinition, preTaskDefinition);
preProcessTaskRelation.setPreTaskCode(0L);
preProcessTaskRelation.setPreTaskVersion(0);
processTaskRelations.add(preProcessTaskRelation);
}
processTaskRelation.setPreTaskCode(preTaskDefinition.getCode());
processTaskRelation.setPreTaskVersion(preTaskDefinition.getVersion());
} else {
processTaskRelation.setPreTaskCode(0L);
processTaskRelation.setPreTaskVersion(0);
}
processTaskRelations.add(processTaskRelation);
updateRelation(loginUser, result, processDefinition, processTaskRelations);
return result;
}
private ProcessTaskRelation setRelation(ProcessDefinition processDefinition, TaskDefinition taskDefinition) {
Date now = new Date();
ProcessTaskRelation processTaskRelation = new ProcessTaskRelation();
processTaskRelation.setProjectCode(processDefinition.getProjectCode());
processTaskRelation.setProcessDefinitionCode(processDefinition.getCode());
processTaskRelation.setProcessDefinitionVersion(processDefinition.getVersion());
processTaskRelation.setPostTaskCode(taskDefinition.getCode());
processTaskRelation.setPostTaskVersion(taskDefinition.getVersion());
processTaskRelation.setConditionType(ConditionType.NONE);
processTaskRelation.setConditionParams("{}");
processTaskRelation.setCreateTime(now);
processTaskRelation.setUpdateTime(now);
return processTaskRelation;
}
private void updateProcessDefiniteVersion(User loginUser, Map<String, Object> result, ProcessDefinition processDefinition) {
int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE);
if (insertVersion <= 0) {
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR);
}
processDefinition.setVersion(insertVersion);
}
/**
* delete process task relation
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode process definition code
* @param taskCode the post task code
* @return delete result code
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> deleteTaskProcessRelation(User loginUser, long projectCode, long processDefinitionCode, long taskCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (taskCode == 0) {
putMsg(result, Status.DELETE_TASK_PROCESS_RELATION_ERROR);
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionCode);
return result;
}
TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode);
if (null == taskDefinition) {
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, taskCode);
return result;
}
List<ProcessTaskRelation> processTaskRelations = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode);
List<ProcessTaskRelation> processTaskRelationList = Lists.newArrayList(processTaskRelations);
if (CollectionUtils.isEmpty(processTaskRelationList)) {
putMsg(result, Status.DATA_IS_NULL, "processTaskRelationList");
return result;
}
List<Long> downstreamList = Lists.newArrayList();
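// NOTE: calling remove() on processTaskRelationList inside the for-each
// loop below is what raises the ConcurrentModificationException reported
// in issue #8378; an explicit Iterator (or collecting the matches and
// removing them after the loop) avoids it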
for (ProcessTaskRelation processTaskRelation : processTaskRelationList) {
if (processTaskRelation.getPreTaskCode() == taskCode) {
downstreamList.add(processTaskRelation.getPostTaskCode());
}
if (processTaskRelation.getPostTaskCode() == taskCode) {
processTaskRelationList.remove(processTaskRelation);
}
}
if (CollectionUtils.isNotEmpty(downstreamList)) {
putMsg(result, Status.TASK_HAS_DOWNSTREAM, org.apache.commons.lang.StringUtils.join(downstreamList, ","));
return result;
}
updateProcessDefiniteVersion(loginUser, result, processDefinition);
updateRelation(loginUser, result, processDefinition, processTaskRelationList);
if (TaskType.CONDITIONS.getDesc().equals(taskDefinition.getTaskType())
|| TaskType.DEPENDENT.getDesc().equals(taskDefinition.getTaskType())
|| TaskType.SUB_PROCESS.getDesc().equals(taskDefinition.getTaskType())) {
int deleteTaskDefinition = taskDefinitionMapper.deleteByCode(taskCode);
if (0 == deleteTaskDefinition) {
putMsg(result, Status.DELETE_TASK_DEFINE_BY_CODE_ERROR);
throw new ServiceException(Status.DELETE_TASK_DEFINE_BY_CODE_ERROR);
}
}
putMsg(result, Status.SUCCESS);
return result;
}
private void updateRelation(User loginUser, Map<String, Object> result, ProcessDefinition processDefinition,
List<ProcessTaskRelation> processTaskRelationList) {
List<ProcessTaskRelationLog> relationLogs = processTaskRelationList.stream().map(ProcessTaskRelationLog::new).collect(Collectors.toList());
int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(), processDefinition.getCode(),
processDefinition.getVersion(), relationLogs, Lists.newArrayList(), Boolean.TRUE);
if (insertResult == Constants.EXIT_CODE_SUCCESS) {
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefinition);
} else {
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR);
}
}
/**
* delete task upstream relation
*
* @param loginUser login user
* @param projectCode project code
* @param preTaskCodes the pre task codes, sep ','
* @param taskCode the post task code
* @return delete result code
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> deleteUpstreamRelation(User loginUser, long projectCode, String preTaskCodes, long taskCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (StringUtils.isEmpty(preTaskCodes)) {
putMsg(result, Status.DATA_IS_NULL, "preTaskCodes");
return result;
}
List<ProcessTaskRelation> upstreamList = processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode);
if (CollectionUtils.isEmpty(upstreamList)) {
putMsg(result, Status.DATA_IS_NULL, "taskCode");
return result;
}
List<Long> preTaskCodeList = Lists.newArrayList(preTaskCodes.split(Constants.COMMA)).stream().map(Long::parseLong).collect(Collectors.toList());
if (preTaskCodeList.contains(0L)) {
putMsg(result, Status.DATA_IS_NULL, "preTaskCodes");
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(upstreamList.get(0).getProcessDefinitionCode());
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, upstreamList.get(0).getProcessDefinitionCode());
return result;
}
List<ProcessTaskRelation> processTaskRelations = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinition.getCode());
List<ProcessTaskRelation> processTaskRelationList = Lists.newArrayList(processTaskRelations);
List<ProcessTaskRelation> processTaskRelationWaitRemove = Lists.newArrayList();
for (ProcessTaskRelation processTaskRelation : processTaskRelationList) {
if (preTaskCodeList.size() > 1) {
if (preTaskCodeList.contains(processTaskRelation.getPreTaskCode())) {
preTaskCodeList.remove(processTaskRelation.getPreTaskCode());
processTaskRelationWaitRemove.add(processTaskRelation);
}
} else {
if (processTaskRelation.getPostTaskCode() == taskCode) {
processTaskRelation.setPreTaskVersion(0);
processTaskRelation.setPreTaskCode(0L);
}
}
if (preTaskCodeList.contains(processTaskRelation.getPostTaskCode())) {
processTaskRelationWaitRemove.add(processTaskRelation);
}
}
processTaskRelationList.removeAll(processTaskRelationWaitRemove);
updateProcessDefiniteVersion(loginUser, result, processDefinition);
updateRelation(loginUser, result, processDefinition, processTaskRelationList);
return result;
}
/**
* delete task downstream relation
*
* @param loginUser login user
* @param projectCode project code
* @param postTaskCodes the post task codes, sep ','
* @param taskCode the pre task code
* @return delete result code
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> deleteDownstreamRelation(User loginUser, long projectCode, String postTaskCodes, long taskCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (StringUtils.isEmpty(postTaskCodes)) {
putMsg(result, Status.DATA_IS_NULL, "postTaskCodes");
return result;
}
List<ProcessTaskRelation> downstreamList = processTaskRelationMapper.queryDownstreamByCode(projectCode, taskCode);
if (CollectionUtils.isEmpty(downstreamList)) {
putMsg(result, Status.DATA_IS_NULL, "taskCode");
return result;
}
List<Long> postTaskCodeList = Lists.newArrayList(postTaskCodes.split(Constants.COMMA)).stream().map(Long::parseLong).collect(Collectors.toList());
if (postTaskCodeList.contains(0L)) {
putMsg(result, Status.DATA_IS_NULL, "postTaskCodes");
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(downstreamList.get(0).getProcessDefinitionCode());
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, downstreamList.get(0).getProcessDefinitionCode());
return result;
}
List<ProcessTaskRelation> processTaskRelations = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinition.getCode());
List<ProcessTaskRelation> processTaskRelationList = Lists.newArrayList(processTaskRelations);
processTaskRelationList.removeIf(processTaskRelation -> postTaskCodeList.contains(processTaskRelation.getPostTaskCode()) && processTaskRelation.getPreTaskCode() == taskCode);
updateProcessDefiniteVersion(loginUser, result, processDefinition);
updateRelation(loginUser, result, processDefinition, processTaskRelationList);
return result;
}
/**
* query task upstream relation
*
* @param loginUser login user
* @param projectCode project code
* @param taskCode current task code (post task code)
* @return the upstream task definitions
*/
@Override
public Map<String, Object> queryUpstreamRelation(User loginUser, long projectCode, long taskCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
List<ProcessTaskRelation> processTaskRelationList = processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode);
List<TaskDefinitionLog> taskDefinitionLogList = new ArrayList<>();
if (CollectionUtils.isNotEmpty(processTaskRelationList)) {
Set<TaskDefinition> taskDefinitions = processTaskRelationList
.stream()
.map(processTaskRelation -> {
TaskDefinition taskDefinition = buildTaskDefinition();
taskDefinition.setProjectCode(processTaskRelation.getProjectCode());
taskDefinition.setCode(processTaskRelation.getPreTaskCode());
taskDefinition.setVersion(processTaskRelation.getPreTaskVersion());
return taskDefinition;
})
.collect(Collectors.toSet());
taskDefinitionLogList = taskDefinitionLogMapper.queryByTaskDefinitions(taskDefinitions);
}
result.put(Constants.DATA_LIST, taskDefinitionLogList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query task downstream relation
*
* @param loginUser login user
* @param projectCode project code
* @param taskCode pre task code
* @return the downstream task definitions
*/
@Override
public Map<String, Object> queryDownstreamRelation(User loginUser, long projectCode, long taskCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
List<ProcessTaskRelation> processTaskRelationList = processTaskRelationMapper.queryDownstreamByCode(projectCode, taskCode);
List<TaskDefinitionLog> taskDefinitionLogList = new ArrayList<>();
if (CollectionUtils.isNotEmpty(processTaskRelationList)) {
Set<TaskDefinition> taskDefinitions = processTaskRelationList
.stream()
.map(processTaskRelation -> {
TaskDefinition taskDefinition = buildTaskDefinition();
taskDefinition.setProjectCode(processTaskRelation.getProjectCode());
taskDefinition.setCode(processTaskRelation.getPostTaskCode());
taskDefinition.setVersion(processTaskRelation.getPostTaskVersion());
return taskDefinition;
})
.collect(Collectors.toSet());
taskDefinitionLogList = taskDefinitionLogMapper.queryByTaskDefinitions(taskDefinitions);
}
result.put(Constants.DATA_LIST, taskDefinitionLogList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete edge
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode process definition code
* @param preTaskCode pre task code
* @param postTaskCode post task code
* @return delete result code
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public Map<String, Object> deleteEdge(User loginUser, long projectCode, long processDefinitionCode, long preTaskCode, long postTaskCode) {
Project project = projectMapper.queryByCode(projectCode);
//check user access for project
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionCode);
return result;
}
List<ProcessTaskRelation> processTaskRelations = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode);
List<ProcessTaskRelation> processTaskRelationList = Lists.newArrayList(processTaskRelations);
if (CollectionUtils.isEmpty(processTaskRelationList)) {
putMsg(result, Status.DATA_IS_NULL, "processTaskRelationList");
return result;
}
Map<Long, List<ProcessTaskRelation>> taskRelationMap = new HashMap<>();
for (ProcessTaskRelation processTaskRelation : processTaskRelationList) {
taskRelationMap.compute(processTaskRelation.getPostTaskCode(), (k, v) -> {
if (v == null) {
v = new ArrayList<>();
}
v.add(processTaskRelation);
return v;
});
}
if (!taskRelationMap.containsKey(postTaskCode)) {
putMsg(result, Status.DATA_IS_NULL, "postTaskCode");
return result;
}
if (taskRelationMap.get(postTaskCode).size() > 1) {
for (ProcessTaskRelation processTaskRelation : taskRelationMap.get(postTaskCode)) {
if (processTaskRelation.getPreTaskCode() == preTaskCode) {
int delete = processTaskRelationMapper.deleteById(processTaskRelation.getId());
if (delete == 0) {
putMsg(result, Status.DELETE_EDGE_ERROR);
throw new ServiceException(Status.DELETE_EDGE_ERROR);
}
processTaskRelationList.remove(processTaskRelation);
}
}
} else {
ProcessTaskRelation processTaskRelation = taskRelationMap.get(postTaskCode).get(0);
processTaskRelationList.remove(processTaskRelation);
processTaskRelation.setPreTaskVersion(0);
processTaskRelation.setPreTaskCode(0L);
processTaskRelationList.add(processTaskRelation);
}
updateProcessDefiniteVersion(loginUser, result, processDefinition);
updateRelation(loginUser, result, processDefinition, processTaskRelationList);
return result;
}
/**
* build task definition
*
* @return task definition
*/
private TaskDefinition buildTaskDefinition() {
return new TaskDefinition() {
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof TaskDefinition)) {
return false;
}
TaskDefinition that = (TaskDefinition) o;
return getCode() == that.getCode()
&& getVersion() == that.getVersion()
&& getProjectCode() == that.getProjectCode();
}
@Override
public int hashCode() {
return Objects.hash(getCode(), getVersion(), getProjectCode());
}
};
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,170 | [Bug] [UI] SUB_PROCESS Clicking into the child node does not react | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
On the **Workflow Definition List** page, open a workflow, select a SUB_PROCESS component, and click "Enter this child node": nothing happens.
Note: this feature works correctly on the workflow instance page.
ERROR:
![5c49f82876d21d5a445b9c76d650622](https://user-images.githubusercontent.com/18095523/153817297-13a04d85-2aa5-4504-b13f-a4361146cade.png)
![image](https://user-images.githubusercontent.com/18095523/150746570-d688cb8e-6155-4d21-806f-ee1bce0db1fc.png)
### What you expected to happen
Child nodes can be entered.
### How to reproduce
See "What happened"
### Anything else
_No response_
### Version
2.0.3
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8170 | https://github.com/apache/dolphinscheduler/pull/8371 | f9c090e048250a6ec57719be9bc0f385d5b97f97 | 6334a8221a0dfa35d7fb69de227b5aedbe775a0f | "2022-01-24T08:21:58Z" | java | "2022-02-17T07:44:19Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="form-model-wrapper" v-clickoutside="_handleClose">
<div class="title-box">
<span class="name"
>{{ $t("Current node settings") }}
<a
v-if="helpUrlEnable(nodeData.taskType)"
class="helper-link"
target="_blank"
:href="helpUrl(nodeData.taskType)"
>
<i class="el-icon-question" />
{{ nodeData.taskType }} {{ $t("Instructions") }}</a
>
</span>
<span class="go-subtask">
<!-- The log dialog is handled here because the component cannot pop up a dialog on its own -->
<m-log
v-if="type === 'instance' && taskInstance"
:item="backfillItem"
:task-instance-id="taskInstance.id"
>
<template slot="history"
><a href="javascript:" @click="_seeHistory"
><em class="ansicon el-icon-alarm-clock"></em
><em>{{ $t("View history") }}</em></a
></template
>
<template slot="log"
><a href="javascript:"
><em class="ansicon el-icon-document"></em
><em>{{ $t("View log") }}</em></a
></template
>
</m-log>
<a href="javascript:" @click="_goSubProcess" v-if="_isGoSubProcess"
><em class="ansicon ri-node-tree"></em
><em>{{ $t("Enter this child node") }}</em></a
>
</span>
</div>
<div class="content-box" v-if="isContentBox">
<div class="form-model">
<!-- Reference from task -->
<!-- <reference-from-task :taskType="nodeData.taskType" /> -->
<!-- Node name -->
<m-list-box>
<div slot="text">{{ $t("Node name") }}</div>
<div slot="content">
<el-input
type="text"
v-model="name"
size="small"
:disabled="isDetails"
:placeholder="$t('Please enter name (required)')"
maxlength="100"
id="inputNodeName"
>
</el-input>
</div>
</m-list-box>
<m-list-box v-if="fromTaskDefinition">
<div slot="text">{{ $t("Task Type") }}</div>
<div slot="content">
<el-select
@change="changeTaskType"
:value="nodeData.taskType"
:placeholder="$t('Please select a task type (required)')"
size="small"
style="width: 100%"
:disabled="isDetails"
>
<el-option
v-for="type in tasksTypeList"
:key="type"
:label="type"
:value="type"
:disabled="type === 'CONDITIONS' || type === 'SWITCH'"
>
</el-option>
</el-select>
</div>
</m-list-box>
<m-list-box v-if="fromTaskDefinition">
<div slot="text">{{ $t("Process Name") }}</div>
<div slot="content">
<el-select
@change="changeProcessCode"
:value="processCode"
size="small"
style="width: 100%"
:disabled="isDetails || taskDefinition"
>
<el-option
v-for="process in processListS"
:key="process.code"
:label="process.name"
:value="process.code"
/>
</el-select>
</div>
</m-list-box>
<!-- Running sign -->
<m-list-box>
<div slot="text">{{ $t("Run flag") }}</div>
<div slot="content">
<el-radio-group v-model="runFlag" size="small">
<el-radio :label="'YES'" :disabled="isDetails">{{
$t("Normal")
}}</el-radio>
<el-radio :label="'NO'" :disabled="isDetails">{{
$t("Prohibition execution")
}}</el-radio>
</el-radio-group>
</div>
</m-list-box>
<!-- description -->
<m-list-box>
<div slot="text">{{ $t("Description") }}</div>
<div slot="content">
<el-input
:rows="2"
type="textarea"
:disabled="isDetails"
v-model="desc"
:placeholder="$t('Please enter description')"
>
</el-input>
</div>
</m-list-box>
<!-- Task priority -->
<m-list-box>
<div slot="text">{{ $t("Task priority") }}</div>
<div slot="content">
<span class="label-box" style="width: 193px; display: inline-block">
<m-priority v-model="taskInstancePriority"></m-priority>
</span>
</div>
</m-list-box>
<!-- Worker group and environment -->
<m-list-box>
<div slot="text">{{ $t("Worker group") }}</div>
<div slot="content">
<span class="label-box" style="width: 193px; display: inline-block">
<m-worker-groups v-model="workerGroup"></m-worker-groups>
</span>
<span class="text-b">{{ $t("Environment Name") }}</span>
<m-related-environment
v-model="environmentCode"
:workerGroup="workerGroup"
:isNewCreate="isNewCreate"
v-on:environmentCodeEvent="_onUpdateEnvironmentCode"
></m-related-environment>
</div>
</m-list-box>
<!-- Worker group and environment -->
<m-list-box>
<div slot="text">{{ $t("Task group name") }}</div>
<div slot="content">
<span class="label-box" style="width: 193px; display: inline-block">
<m-task-groups
v-model="taskGroupId"
:project-code="this.projectCode"
v-on:taskGroupIdEvent="_onUpdateTaskGroupId"
></m-task-groups>
</span>
<span class="text-b">{{ $t("Task group queue priority") }}</span>
<el-input
:disabled="taskGroupId === ''"
style="width: 166px"
type="input"
v-model="taskGroupPriority"
maxlength="60"
v-on:input="_onUpdateTaskGroupPriority"
size="small"
>
</el-input>
</div>
</m-list-box>
<!-- Number of failed retries -->
<m-list-box v-if="nodeData.taskType !== 'SUB_PROCESS'">
<div slot="text">{{ $t("Number of failed retries") }}</div>
<div slot="content">
<el-input v-model.number="maxRetryTimes" size="small" style="width: 150px;" />
<span>({{ $t("Times") }})</span>
<span class="text-b">{{ $t("Failed retry interval") }}</span>
<el-input v-model.number="retryInterval" size="small" style="width: 150px;" />
<span>({{ $t("Minute") }})</span>
</div>
</m-list-box>
<!-- Delay execution time -->
<m-list-box
v-if="
nodeData.taskType !== 'SUB_PROCESS' &&
nodeData.taskType !== 'CONDITIONS' &&
nodeData.taskType !== 'DEPENDENT' &&
nodeData.taskType !== 'SWITCH'
"
>
<div slot="text">{{ $t("Delay execution time") }}</div>
<div slot="content">
<m-select-input
v-model="delayTime"
:list="[]"
></m-select-input>
<span>({{ $t("Minute") }})</span>
</div>
</m-list-box>
<!-- Branch flow -->
<m-list-box v-if="nodeData.taskType === 'CONDITIONS'">
<div slot="text">{{ $t("State") }}</div>
<div slot="content">
<span class="label-box" style="width: 193px; display: inline-block">
<el-select
style="width: 157px"
size="small"
v-model="successNode"
:disabled="true"
>
<el-option
v-for="item in stateList"
:key="item.value"
:value="item.value"
:label="item.label"
></el-option>
</el-select>
</span>
<span class="text-b" style="padding-left: 38px">{{
$t("Branch flow")
}}</span>
<el-select
style="width: 157px"
size="small"
v-model="successBranch"
clearable
:disabled="isDetails"
>
<el-option
v-for="item in postTasks"
:key="item.code"
:value="item.code"
:label="item.name"
></el-option>
</el-select>
</div>
</m-list-box>
<m-list-box v-if="nodeData.taskType === 'CONDITIONS'">
<div slot="text">{{ $t("State") }}</div>
<div slot="content">
<span class="label-box" style="width: 193px; display: inline-block">
<el-select
style="width: 157px"
size="small"
v-model="failedNode"
:disabled="true"
>
<el-option
v-for="item in stateList"
:key="item.value"
:value="item.value"
:label="item.label"
></el-option>
</el-select>
</span>
<span class="text-b" style="padding-left: 38px">{{
$t("Branch flow")
}}</span>
<el-select
style="width: 157px"
size="small"
v-model="failedBranch"
clearable
:disabled="isDetails"
>
<el-option
v-for="item in postTasks"
:key="item.code"
:value="item.code"
:label="item.name"
></el-option>
</el-select>
</div>
</m-list-box>
<div v-if="backfillRefresh">
<!-- Task timeout alarm -->
<m-timeout-alarm
v-if="nodeData.taskType !== 'DEPENDENT'"
ref="timeout"
:backfill-item="backfillItem"
@on-timeout="_onTimeout"
>
</m-timeout-alarm>
<!-- Dependent timeout alarm -->
<m-dependent-timeout
v-if="nodeData.taskType === 'DEPENDENT'"
ref="dependentTimeout"
:backfill-item="backfillItem"
@on-timeout="_onDependentTimeout"
>
</m-dependent-timeout>
<!-- shell node -->
<m-shell
v-if="nodeData.taskType === 'SHELL'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="SHELL"
:backfill-item="backfillItem"
>
</m-shell>
<!-- sub_process node -->
<m-sub-process
v-if="nodeData.taskType === 'SUB_PROCESS'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
@on-set-process-name="_onSetProcessName"
ref="SUB_PROCESS"
:backfill-item="backfillItem"
>
</m-sub-process>
<!-- procedure node -->
<m-procedure
v-if="nodeData.taskType === 'PROCEDURE'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="PROCEDURE"
:backfill-item="backfillItem"
>
</m-procedure>
<!-- sql node -->
<m-sql
v-if="nodeData.taskType === 'SQL'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="SQL"
:create-node-id="nodeData.id"
:backfill-item="backfillItem"
>
</m-sql>
<!-- spark node -->
<m-spark
v-if="nodeData.taskType === 'SPARK'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="SPARK"
:backfill-item="backfillItem"
>
</m-spark>
<m-flink
v-if="nodeData.taskType === 'FLINK'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="FLINK"
:backfill-item="backfillItem"
>
</m-flink>
<!-- mr node -->
<m-mr
v-if="nodeData.taskType === 'MR'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="MR"
:backfill-item="backfillItem"
>
</m-mr>
<!-- python node -->
<m-python
v-if="nodeData.taskType === 'PYTHON'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="PYTHON"
:backfill-item="backfillItem"
>
</m-python>
<!-- dependent node -->
<m-dependent
v-if="nodeData.taskType === 'DEPENDENT'"
@on-dependent="_onDependent"
@on-cache-dependent="_onCacheDependent"
ref="DEPENDENT"
:backfill-item="backfillItem"
>
</m-dependent>
<m-http
v-if="nodeData.taskType === 'HTTP'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="HTTP"
:backfill-item="backfillItem"
>
</m-http>
<m-datax
v-if="nodeData.taskType === 'DATAX'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="DATAX"
:backfill-item="backfillItem"
>
</m-datax>
<m-pigeon
v-if="nodeData.taskType === 'PIGEON'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
:backfill-item="backfillItem"
ref="PIGEON"
>
</m-pigeon>
<m-sqoop
v-if="nodeData.taskType === 'SQOOP'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="SQOOP"
:backfill-item="backfillItem"
>
</m-sqoop>
<m-conditions
v-if="nodeData.taskType === 'CONDITIONS'"
ref="CONDITIONS"
@on-dependent="_onDependent"
@on-cache-dependent="_onCacheDependent"
:backfill-item="backfillItem"
:prev-tasks="prevTasks"
>
</m-conditions>
<m-data-quality
v-if="nodeData.taskType === 'DATA_QUALITY'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="DATA_QUALITY"
:backfill-item="backfillItem">
</m-data-quality>
<m-switch
v-if="nodeData.taskType === 'SWITCH'"
ref="SWITCH"
@on-switch-result="_onSwitchResult"
:backfill-item="backfillItem"
:nodeData="nodeData"
:postTasks="postTasks"
></m-switch>
<!-- seatunnel node -->
<m-seatunnel
v-if="nodeData.taskType === 'SEATUNNEL'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="SEATUNNEL"
:backfill-item="backfillItem"
>
</m-seatunnel>
</div>
<!-- Pre-tasks in workflow -->
<m-pre-tasks ref="preTasks" :code="code" :fromTaskDefinition="fromTaskDefinition" :prevTasks="prevTasks" :processDefinition="processDefinition"/>
</div>
</div>
<div class="bottom-box">
<div class="submit" style="background: #fff">
<el-button type="text" size="small" id="cancelBtn">
{{ $t("Cancel") }}
</el-button>
<el-button
type="primary"
size="small"
round
:loading="spinnerLoading"
@click="ok()"
:disabled="isDetails"
id="btnSubmit"
>{{ spinnerLoading ? $t("Loading...") : $t("Confirm") }}
</el-button>
</div>
</div>
</div>
</template>
<script>
import _ from 'lodash'
import { mapActions, mapState } from 'vuex'
import mLog from './log'
import mMr from './tasks/mr'
import mSql from './tasks/sql'
import i18n from '@/module/i18n'
import { findLocale } from '@/module/i18n/config'
import mListBox from './tasks/_source/listBox'
import mShell from './tasks/shell'
import mSeatunnel from './tasks/seatunnel'
import mSpark from './tasks/spark'
import mFlink from './tasks/flink'
import mPython from './tasks/python'
import mProcedure from './tasks/procedure'
import mDependent from './tasks/dependent'
import mHttp from './tasks/http'
import mDatax from './tasks/datax'
import mPigeon from './tasks/pigeon'
import mConditions from './tasks/conditions'
import mDataQuality from './tasks/dataquality'
import mSwitch from './tasks/switch.vue'
import mSqoop from './tasks/sqoop'
import mSubProcess from './tasks/sub_process'
import mSelectInput from './_source/selectInput'
import mTimeoutAlarm from './_source/timeoutAlarm'
import mDependentTimeout from './_source/dependentTimeout'
import mWorkerGroups from './_source/workerGroups'
import mRelatedEnvironment from './_source/relatedEnvironment'
import mTaskGroups from './_source/taskGroups'
import mPreTasks from './tasks/pre_tasks'
import clickoutside from '@/module/util/clickoutside'
import disabledState from '@/module/mixin/disabledState'
import mPriority from '@/module/components/priority/priority'
import { findComponentDownward } from '@/module/util/'
import { tasksType } from '@/conf/home/pages/dag/_source/config.js'
// import ReferenceFromTask from './_source/referenceFromTask.vue'
export default {
name: 'form-model',
data () {
return {
// loading
spinnerLoading: false,
// node name
name: '',
// description
desc: '',
// Node echo data
backfillItem: {},
cacheBackfillItem: {},
// Resource(list)
resourcesList: [],
successNode: 'success',
failedNode: 'failed',
successBranch: '',
failedBranch: '',
conditionResult: {
successNode: [],
failedNode: []
},
switchResult: {},
// dependence
dependence: {},
// cache dependence
cacheDependence: {},
// task code
code: 0,
// Current node params data
params: {},
      // Run flag (YES = task enabled)
runFlag: 'YES',
      // Whether the form content is shown; special handling for the secondary echo (backfill) of node data
isContentBox: false,
// Number of failed retries
maxRetryTimes: '0',
// Failure retry interval
retryInterval: '1',
// Delay execution time
delayTime: '0',
// Task timeout alarm
timeout: {},
// (For Dependent nodes) Wait start timeout alarm
waitStartTimeout: {},
// Task priority
taskInstancePriority: 'MEDIUM',
// worker group id
workerGroup: 'default',
// selected environment
environmentCode: '',
selectedWorkerGroup: '',
taskGroupId: '',
taskGroupPriority: 0,
stateList: [
{
value: 'success',
label: `${i18n.$t('Success')}`
},
{
value: 'failed',
label: `${i18n.$t('Failed')}`
}
],
// for CONDITIONS and SWITCH
postTasks: [],
prevTasks: [],
      // refresh part of the form after backfillItem is set from outside
backfillRefresh: true,
// whether this is a new Task
isNewCreate: true,
tasksTypeList: Object.keys(tasksType),
// processCode
processCode: undefined,
processDefinition: null
}
},
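  // Expose this form to descendant task components via provide/inject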
provide () {
return {
formModel: this
}
},
    /**
     * Handle click events that originate outside of this component
     */
directives: { clickoutside },
mixins: [disabledState],
props: {
nodeData: Object,
type: {
type: String,
default: ''
},
taskDefinition: Object,
projectCode: {
type: Number
}
},
inject: ['dagChart'],
methods: {
...mapActions('dag', ['getTaskInstanceList', 'getProcessDefinition']),
    helpUrlEnable (typekey) {
      const type = tasksType[typekey]
      if (!type) return false
      return !type.helperLinkDisable
    },
helpUrl (tasktype) {
return 'https://dolphinscheduler.apache.org/' +
findLocale(i18n.globalScope.LOCALE).helperContext +
'/docs/latest/user_doc/guide/task/' + tasktype.toLowerCase() + '.html'
},
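    /**
     * Convert a task definition returned by the API into the shape used to backfill this form.
     * For DEPENDENT tasks the 'WARNFAILED' timeout strategy is normalized back to 'WARN,FAILED'
     * to match the dependent-timeout form options.
     */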
taskToBackfillItem (task) {
let strategy = ''
if (this.nodeData.taskType === 'DEPENDENT') {
strategy = task.timeoutNotifyStrategy === 'WARNFAILED' ? 'WARN,FAILED' : task.timeoutNotifyStrategy
} else {
strategy = task.timeoutNotifyStrategy
}
return {
code: task.code,
conditionResult: task.taskParams.conditionResult,
switchResult: task.taskParams.switchResult,
delayTime: task.delayTime,
dependence: task.taskParams.dependence,
desc: task.description,
id: task.id,
maxRetryTimes: task.failRetryTimes,
name: task.name,
params: _.omit(task.taskParams, [
'conditionResult',
'dependence',
'waitStartTimeout',
'switchResult'
]),
retryInterval: task.failRetryInterval,
runFlag: task.flag,
taskInstancePriority: task.taskPriority,
timeout: {
interval: task.timeout,
strategy,
enable: task.timeoutFlag === 'OPEN'
},
type: task.taskType,
waitStartTimeout: task.taskParams.waitStartTimeout,
workerGroup: task.workerGroup,
environmentCode: task.environmentCode,
taskGroupId: task.taskGroupId,
taskGroupPriority: task.taskGroupPriority
}
},
/**
* depend
*/
_onDependent (o) {
this.dependence = Object.assign(this.dependence, {}, o)
},
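    /**
     * switch branch result
     */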
_onSwitchResult (o) {
this.switchResult = o
},
/**
* cache dependent
*/
_onCacheDependent (o) {
this.cacheDependence = Object.assign(this.cacheDependence, {}, o)
},
/**
* Task timeout alarm
*/
_onTimeout (o) {
this.timeout = Object.assign(this.timeout, {}, o)
},
/**
* Dependent timeout alarm
*/
_onDependentTimeout (o) {
this.timeout = Object.assign(this.timeout, {}, o.waitCompleteTimeout)
this.waitStartTimeout = Object.assign(
this.waitStartTimeout,
{},
o.waitStartTimeout
)
},
/**
* Click external to close the current component
*/
_handleClose () {
// this.close()
},
/**
* Jump to task instance
*/
_seeHistory () {
this.$emit('seeHistory', this.backfillItem.name)
},
    /**
     * Enter the sub-process, distinguishing between a process instance and a process definition
     */
_goSubProcess () {
if (_.isEmpty(this.backfillItem)) {
this.$message.warning(
`${i18n.$t(
'The newly created sub-Process has not yet been executed and cannot enter the sub-Process'
)}`
)
return
}
if (this.router.history.current.name === 'projects-instance-details') {
if (!this.taskInstance) {
this.$message.warning(
`${i18n.$t(
'The task has not been executed and cannot enter the sub-Process'
)}`
)
return
}
this.store
.dispatch('dag/getSubProcessId', { taskId: this.taskInstance.id })
.then((res) => {
this.$emit('onSubProcess', {
subInstanceId: res.data.subProcessInstanceId,
fromThis: this
})
})
.catch((e) => {
this.$message.error(e.msg || '')
})
} else {
const processDefinitionId =
this.backfillItem.params.processDefinitionId
const process = this.processListS.find(
(def) => def.id === processDefinitionId
)
this.$emit('onSubProcess', {
subProcessCode: process.code,
fromThis: this
})
}
},
_onUpdateWorkerGroup (o) {
this.selectedWorkerGroup = o
},
/**
* return params
*/
_onParams (o) {
this.params = Object.assign({}, o)
},
_onUpdateEnvironmentCode (o) {
this.environmentCode = o
},
_onUpdateTaskGroupId (o) {
this.taskGroupId = o
if (this.taskGroupId === '') {
this.taskGroupPriority = 0
}
},
_onUpdateTaskGroupPriority (o) {
this.taskGroupPriority = o
},
    /**
     * Merge cached params emitted by task components while editing
     */
_onCacheParams (o) {
this.params = Object.assign(this.params, {}, o)
},
/**
* verification name
*/
_verifName () {
if (!_.trim(this.name)) {
this.$message.warning(`${i18n.$t('Please enter name (required)')}`)
return false
}
if (
this.successBranch &&
this.successBranch === this.failedBranch
) {
this.$message.warning(
`${i18n.$t(
'Cannot select the same node for successful branch flow and failed branch flow'
)}`
)
return false
}
      return true
},
_verifWorkGroup () {
let item = this.store.state.security.workerGroupsListAll.find((item) => {
return item.id === this.workerGroup
})
if (item === undefined) {
this.$message.warning(
`${i18n.$t(
'The Worker group no longer exists, please select the correct Worker group!'
)}`
)
return false
}
return true
},
_verifTaskType () {
if (!this.fromTaskDefinition) return true
if (!this.nodeData.taskType) {
this.$message.warning(
`${i18n.$t('Please select a task type (required)')}`
)
return false
}
return true
},
/**
* Global verification procedure
*/
_verification () {
// Verify name
if (!this._verifName()) {
return
}
// Verify task type
if (!this._verifTaskType()) {
return
}
// verif workGroup
if (!this._verifWorkGroup()) {
return
}
// Verify task alarm parameters
if (this.nodeData.taskType === 'DEPENDENT') {
if (!this.$refs.dependentTimeout._verification()) {
return
}
} else {
if (!this.$refs.timeout._verification()) {
return
}
}
// Verify node parameters
if (!this.$refs[this.nodeData.taskType]._verification()) {
return
}
// set dag preTask
if (this.dagChart && this.$refs.preTasks) {
this.$refs.preTasks.setDagPreNodes()
}
// set edge label
if (this.dagChart) {
this._setEdgeLabel()
}
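      // Write the selected success/failed branches back into conditionResult (CONDITIONS nodes)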
this.successBranch && (this.conditionResult.successNode[0] = this.successBranch)
this.failedBranch && (this.conditionResult.failedNode[0] = this.failedBranch)
this.$emit('addTaskInfo', {
item: {
code: this.nodeData.id,
name: this.name,
description: this.desc,
taskType: this.nodeData.taskType,
taskParams: {
...this.params,
dependence: this.cacheDependence,
conditionResult: this.conditionResult,
waitStartTimeout: this.waitStartTimeout,
switchResult: this.switchResult
},
flag: this.runFlag,
taskPriority: this.taskInstancePriority,
workerGroup: this.workerGroup,
failRetryTimes: this.maxRetryTimes,
failRetryInterval: this.retryInterval,
timeoutFlag: this.timeout.enable ? 'OPEN' : 'CLOSE',
timeoutNotifyStrategy: this.timeout.strategy.indexOf(',') > 0 ? 'WARNFAILED' : this.timeout.strategy,
timeout: this.timeout.interval || 0,
delayTime: this.delayTime,
environmentCode: this.environmentCode || -1,
taskGroupId: this.taskGroupId,
taskGroupPriority: this.taskGroupPriority
},
fromThis: this,
...(this.fromTaskDefinition ? {
prevTasks: this.$refs.preTasks ? this.$refs.preTasks.preTasks : [],
processCode: this.processCode
} : {})
})
},
    /**
     * Echo the selected sub-workflow's name onto this node
     */
_onSetProcessName (name) {
this.name = name
},
/**
* set edge label by successBranch && failedBranch
*/
_setEdgeLabel () {
if (this.successBranch || this.failedBranch) {
const canvas = findComponentDownward(this.dagChart, 'dag-canvas')
const edges = canvas.getEdges()
const successTask = this.postTasks.find(
(t) => t.name === this.successBranch
)
const failedTask = this.postTasks.find(
(t) => t.name === this.failedBranch
)
const sEdge = edges.find(
(edge) =>
successTask &&
edge.sourceId === this.code &&
edge.targetId === successTask.code
)
const fEdge = edges.find(
(edge) =>
failedTask &&
edge.sourceId === this.code &&
edge.targetId === failedTask.code
)
sEdge && canvas.setEdgeLabel(sEdge.id, this.$t('Success'))
fEdge && canvas.setEdgeLabel(fEdge.id, this.$t('Failed'))
}
},
/**
* Submit verification
*/
ok () {
this._verification()
},
/**
* Close and destroy component and component internal events
*/
close () {
      // A node that has never been saved should be deleted on close
      let flag = false
      if (!this.backfillItem.name) {
        flag = true
      }
      this.isContentBox = false
      // flag indicates whether the unsaved node should be removed
this.$emit('close', {
item: this.cacheBackfillItem,
flag: flag,
fromThis: this
})
},
backfill (backfillItem) {
const o = backfillItem
      // A non-empty object means there is existing data to backfill
if (!_.isEmpty(o)) {
this.code = o.code
this.name = o.name
this.taskInstancePriority = o.taskInstancePriority
this.runFlag = o.runFlag || 'YES'
this.desc = o.desc
this.maxRetryTimes = o.maxRetryTimes
this.retryInterval = o.retryInterval
this.delayTime = o.delayTime
if (o.conditionResult) {
this.successBranch = o.conditionResult.successNode[0]
this.failedBranch = o.conditionResult.failedNode[0]
}
if (o.switchResult) {
this.switchResult = o.switchResult
}
        // Whether the selected worker group still exists is re-checked in _verifWorkGroup on submit
if (o.workerGroup === undefined) {
this.store
.dispatch('dag/getTaskInstanceList', {
pageSize: 10,
pageNo: 1,
processInstanceId: this.nodeData.instanceId,
name: o.name
})
.then((res) => {
this.workerGroup = res.totalList[0].workerGroup
})
} else {
this.workerGroup = o.workerGroup
}
this.environmentCode =
o.environmentCode === -1 ? '' : o.environmentCode
this.params = o.params || {}
this.dependence = o.dependence || {}
this.cacheDependence = o.dependence || {}
this.taskGroupId = o.taskGroupId
this.taskGroupPriority = o.taskGroupPriority
} else {
this.workerGroup = this.store.state.security.workerGroupsListAll[0].id
}
this.cacheBackfillItem = JSON.parse(JSON.stringify(o))
this.isContentBox = true
},
changeTaskType (value) {
this.$emit('changeTaskType', value)
},
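    /**
     * Derive this task's upstream and downstream tasks from the process definition's
     * relation list; a relation with preTaskCode === 0 marks a root task, so it is
     * skipped when collecting upstream tasks.
     */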
calculateRelatedTasks () {
if (this.processDefinition && this.taskDefinition) {
const relations = this.processDefinition.processTaskRelationList || []
const tasks = this.processDefinition.taskDefinitionList || []
const tasksMap = {}
tasks.forEach(task => {
tasksMap[task.code] = task
})
const taskCode = this.taskDefinition.code
const buildTask = (task) => ({
code: task.code,
name: task.name,
type: task.taskType
})
// Downstream tasks
const postTasks = relations
.filter(relation => relation.preTaskCode === taskCode)
.map(relation => buildTask(tasksMap[relation.postTaskCode]))
// Upstream tasks
const prevTasks = relations
.filter(relation => relation.postTaskCode === taskCode && relation.preTaskCode !== 0)
.map(relation => buildTask(tasksMap[relation.preTaskCode]))
this.postTasks = postTasks
this.prevTasks = prevTasks
}
},
getProcessDetails () {
this.getProcessDefinition(this.processCode).then(res => {
this.processDefinition = res
this.calculateRelatedTasks()
})
},
changeProcessCode (code) {
this.processCode = code
this.getProcessDetails()
}
},
created () {
// Backfill data
let o = {}
this.code = this.nodeData.id
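    // Task data comes either from the task definition page or from the DAG store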
if (this.fromTaskDefinition) {
if (this.taskDefinition) {
const backfillItem = this.taskToBackfillItem(this.taskDefinition)
o = backfillItem
this.backfillItem = backfillItem
this.isNewCreate = false
}
} else {
let taskList = this.store.state.dag.tasks
if (taskList.length) {
taskList.forEach((task) => {
if (task.code === this.nodeData.id) {
const backfillItem = this.taskToBackfillItem(task)
o = backfillItem
this.backfillItem = backfillItem
this.isNewCreate = false
}
})
}
}
this.backfill(o)
if (this.dagChart) {
const canvas = findComponentDownward(this.dagChart, 'dag-canvas')
const postNodes = canvas.getPostNodes(this.code)
const prevNodes = canvas.getPrevNodes(this.code)
const buildTask = (node) => ({
code: node.id,
name: node.data.taskName,
type: node.data.taskType
})
this.postTasks = postNodes.map(buildTask)
this.prevTasks = prevNodes.map(buildTask)
}
if (this.fromTaskDefinition && this.taskDefinition) {
this.processCode = this.taskDefinition.processCode
this.getProcessDetails()
}
},
mounted () {
let self = this
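    // mousedown (rather than click) so cancel fires before any input blur handlers run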
$('#cancelBtn').mousedown(function (event) {
event.preventDefault()
self.close()
})
},
computed: {
...mapState('dag', ['processListS', 'taskInstances']),
/**
* Child workflow entry show/hide
*/
_isGoSubProcess () {
return this.nodeData.taskType === 'SUB_PROCESS' && this.name && this.processListS && this.processListS.length > 0
},
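    /**
     * The task instance of the current node, matched by task code
     */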
taskInstance () {
if (this.taskInstances.length > 0) {
return this.taskInstances.find(
(instance) => instance.taskCode === this.nodeData.id
)
}
return null
},
/**
* Open the modal from task definition
*/
fromTaskDefinition () {
return this.type === 'task-definition'
}
},
components: {
mListBox,
mMr,
mShell,
mSeatunnel,
mSubProcess,
mProcedure,
mSql,
mLog,
mSpark,
mFlink,
mPython,
mDependent,
mHttp,
mDatax,
mPigeon,
mSqoop,
mConditions,
mDataQuality,
mSwitch,
mSelectInput,
mTimeoutAlarm,
mDependentTimeout,
mPriority,
mWorkerGroups,
mRelatedEnvironment,
mPreTasks,
mTaskGroups
// ReferenceFromTask
}
}
</script>
<style lang="scss" rel="stylesheet/scss">
@import "./formModel";
.ans-radio-disabled {
.ans-radio-inner:after {
background-color: #6f8391;
}
}
</style>
|