status
stringclasses 1
value | repo_name
stringclasses 31
values | repo_url
stringclasses 31
values | issue_id
int64 1
104k
| title
stringlengths 4
369
| body
stringlengths 0
254k
⌀ | issue_url
stringlengths 37
56
| pull_url
stringlengths 37
54
| before_fix_sha
stringlengths 40
40
| after_fix_sha
stringlengths 40
40
| report_datetime
unknown | language
stringclasses 5
values | commit_datetime
unknown | updated_file
stringlengths 4
188
| file_content
stringlengths 0
5.12M
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,710 | [Bug][UI Next][V1.0.0-Alpha] There is no tooltip for timing management table editing and up and down buttons. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![image](https://user-images.githubusercontent.com/19239641/156883414-c840f2bd-38e9-495d-9c01-c366c9ffe961.png)
### What you expected to happen
There is no tooltip for the timing management table's edit and online/offline buttons.
### How to reproduce
Add tooltip.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8710 | https://github.com/apache/dolphinscheduler/pull/8716 | e2af9054b39f73183490fd8a96efe919a29a488d | 698c795d4f3412e175fb28e569bf34c6d8085f0b | "2022-03-05T12:41:37Z" | java | "2022-03-06T12:59:20Z" | dolphinscheduler-ui-next/src/views/projects/workflow/definition/timing/use-table.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { h, ref, reactive } from 'vue'
import { useI18n } from 'vue-i18n'
import { useRouter } from 'vue-router'
import { NSpace, NTooltip, NButton, NPopconfirm, NEllipsis } from 'naive-ui'
import {
deleteScheduleById,
offline,
online,
queryScheduleListPaging
} from '@/service/modules/schedules'
import {
ArrowDownOutlined,
ArrowUpOutlined,
DeleteOutlined,
EditOutlined
} from '@vicons/antd'
import type { Router } from 'vue-router'
import type { TableColumns } from 'naive-ui/es/data-table/src/interface'
import { ISearchParam } from './types'
import styles from '../index.module.scss'
/**
 * Composable providing the column definitions, reactive table state and
 * data-fetching logic for the workflow-definition timing (cron schedule) page.
 *
 * Fix: the edit and online/offline action buttons previously rendered bare,
 * while the delete button already had an NTooltip wrapper; all three action
 * buttons are now wrapped in NTooltip for a consistent hover hint
 * (see apache/dolphinscheduler issue #8710).
 *
 * Returns `{ variables, getTableData }`:
 *  - variables: reactive state (columns, rows, paging, modal visibility)
 *  - getTableData: fetches one page of schedules for the current definition
 */
export function useTable() {
  const { t } = useI18n()
  const router: Router = useRouter()

  const columns: TableColumns<any> = [
    {
      // Sequential row number within the current page, not the schedule id.
      title: '#',
      key: 'id',
      width: 50,
      render: (_row, index) => index + 1
    },
    {
      title: t('project.workflow.workflow_name'),
      key: 'processDefinitionName',
      width: 200,
      // Long workflow names are ellipsized to keep the column width fixed.
      render: (_row) =>
        h(
          NEllipsis,
          { style: 'max-width: 200px' },
          {
            default: () => _row.processDefinitionName
          }
        )
    },
    {
      title: t('project.workflow.start_time'),
      key: 'startTime'
    },
    {
      title: t('project.workflow.end_time'),
      key: 'endTime'
    },
    {
      title: t('project.workflow.crontab'),
      key: 'crontab'
    },
    {
      title: t('project.workflow.failure_strategy'),
      key: 'failureStrategy'
    },
    {
      title: t('project.workflow.status'),
      key: 'releaseState',
      render: (_row) =>
        _row.releaseState === 'ONLINE'
          ? t('project.workflow.up_line')
          : t('project.workflow.down_line')
    },
    {
      title: t('project.workflow.create_time'),
      key: 'createTime'
    },
    {
      title: t('project.workflow.update_time'),
      key: 'updateTime'
    },
    {
      title: t('project.workflow.operation'),
      key: 'operation',
      fixed: 'right',
      className: styles.operation,
      render: (row) => {
        return h(NSpace, null, {
          default: () => [
            // Edit button: disabled while the schedule is online; now wrapped
            // in a tooltip like the delete button below (issue #8710).
            h(
              NTooltip,
              {},
              {
                trigger: () =>
                  h(
                    NButton,
                    {
                      circle: true,
                      type: 'info',
                      size: 'small',
                      disabled: row.releaseState === 'ONLINE',
                      onClick: () => {
                        handleEdit(row)
                      }
                    },
                    {
                      icon: () => h(EditOutlined)
                    }
                  ),
                default: () => t('project.workflow.edit')
              }
            ),
            // Online/offline toggle: icon, color and tooltip all follow the
            // current release state.
            h(
              NTooltip,
              {},
              {
                trigger: () =>
                  h(
                    NButton,
                    {
                      circle: true,
                      type: row.releaseState === 'ONLINE' ? 'error' : 'warning',
                      size: 'small',
                      onClick: () => {
                        handleReleaseState(row)
                      }
                    },
                    {
                      icon: () =>
                        h(
                          row.releaseState === 'ONLINE'
                            ? ArrowDownOutlined
                            : ArrowUpOutlined
                        )
                    }
                  ),
                default: () =>
                  row.releaseState === 'ONLINE'
                    ? t('project.workflow.down_line')
                    : t('project.workflow.up_line')
              }
            ),
            // Delete button: confirmation popover wrapping a tooltip-ed button.
            h(
              NPopconfirm,
              {
                onPositiveClick: () => {
                  handleDelete(row.id)
                }
              },
              {
                trigger: () =>
                  h(
                    NTooltip,
                    {},
                    {
                      trigger: () =>
                        h(
                          NButton,
                          {
                            circle: true,
                            type: 'error',
                            size: 'small'
                          },
                          {
                            icon: () => h(DeleteOutlined)
                          }
                        ),
                      default: () => t('project.workflow.delete')
                    }
                  ),
                default: () => t('project.workflow.delete_confirm')
              }
            )
          ]
        })
      }
    }
  ]

  // Open the timing modal pre-filled with the selected schedule row.
  const handleEdit = (row: any) => {
    variables.showRef = true
    variables.row = row
  }

  const variables = reactive({
    columns,
    row: {},
    tableData: [],
    projectCode: ref(Number(router.currentRoute.value.params.projectCode)),
    page: ref(1),
    pageSize: ref(10),
    searchVal: ref(),
    totalPage: ref(1),
    showRef: ref(false)
  })

  // Fetch one page of schedules for the workflow definition in the route.
  const getTableData = (params: ISearchParam) => {
    const definitionCode = Number(
      router.currentRoute.value.params.definitionCode
    )
    queryScheduleListPaging(
      { ...params, processDefinitionCode: definitionCode },
      variables.projectCode
    ).then((res: any) => {
      variables.totalPage = res.totalPage
      variables.tableData = res.totalList.map((item: any) => {
        return { ...item }
      })
    })
  }

  // Toggle a schedule online/offline, then refresh the table.
  const handleReleaseState = (row: any) => {
    let handle = online
    if (row.releaseState === 'ONLINE') {
      handle = offline
    }
    handle(variables.projectCode, row.id).then(() => {
      window.$message.success(t('project.workflow.success'))
      getTableData({
        pageSize: variables.pageSize,
        pageNo: variables.page,
        searchVal: variables.searchVal
      })
    })
  }

  const handleDelete = (id: number) => {
    /* After deleting the last row of a page, step back one page so the table
       does not land on an empty page. */
    if (variables.tableData.length === 1 && variables.page > 1) {
      variables.page -= 1
    }
    deleteScheduleById(id, variables.projectCode)
      .then(() => {
        window.$message.success(t('project.workflow.success'))
        getTableData({
          pageSize: variables.pageSize,
          pageNo: variables.page,
          searchVal: variables.searchVal
        })
      })
      .catch((error: any) => {
        window.$message.error(error.message)
      })
  }

  return {
    variables,
    getTableData
  }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,717 | [Bug][UI Next][V1.0.0-Alpha] Regularly manage multilingual switching issues. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![image](https://user-images.githubusercontent.com/19239641/156922308-f9813322-ac4e-44a7-8a83-0572645a8218.png)
### What you expected to happen
Regularly manage multilingual switching issues.
### How to reproduce
After switching the language, the header also changes with the language.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8717 | https://github.com/apache/dolphinscheduler/pull/8718 | 698c795d4f3412e175fb28e569bf34c6d8085f0b | 9c162c86c3fad159839e0e58b58a20c2bd0abcce | "2022-03-06T12:09:40Z" | java | "2022-03-06T13:36:11Z" | dolphinscheduler-ui-next/src/views/projects/workflow/definition/timing/index.tsx | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import Card from '@/components/card'
import { ArrowLeftOutlined } from '@vicons/antd'
import { NButton, NDataTable, NIcon, NPagination } from 'naive-ui'
import { defineComponent, onMounted, toRefs } from 'vue'
import { useI18n } from 'vue-i18n'
import { useRouter } from 'vue-router'
import type { Router } from 'vue-router'
import { useTable } from './use-table'
import TimingModal from '../components/timing-modal'
import styles from '../index.module.scss'
export default defineComponent({
  name: 'WorkflowDefinitionTiming',
  setup() {
    // Composables must be called synchronously inside setup(): invoking
    // useI18n()/useRouter() from render() runs outside the component's setup
    // context, so e.g. the card title would not react to locale switches
    // (see apache/dolphinscheduler issue #8717).
    const { t } = useI18n()
    const router: Router = useRouter()
    const { variables, getTableData } = useTable()

    // Fetch the current page of cron schedules using the reactive filters.
    const requestData = () => {
      getTableData({
        pageSize: variables.pageSize,
        pageNo: variables.page,
        searchVal: variables.searchVal
      })
    }

    // Refresh after the timing modal creates or updates a schedule.
    const handleUpdateList = () => {
      requestData()
    }

    // A new search always restarts from the first page.
    const handleSearch = () => {
      variables.page = 1
      requestData()
    }

    // Changing the page size also restarts paging from page 1.
    const handleChangePageSize = () => {
      variables.page = 1
      requestData()
    }

    onMounted(() => {
      requestData()
    })

    return {
      t,
      router,
      requestData,
      handleSearch,
      handleUpdateList,
      handleChangePageSize,
      ...toRefs(variables)
    }
  },
  render() {
    // t and router come from setup() so locale changes re-render correctly.
    const { t, router } = this
    return (
      <div class={styles.content}>
        <Card class={styles.card}>
          <div class={styles.header}>
            <NButton type='primary' onClick={() => router.go(-1)}>
              <NIcon>
                <ArrowLeftOutlined />
              </NIcon>
            </NButton>
          </div>
        </Card>
        <Card title={t('project.workflow.cron_manage')}>
          <NDataTable
            columns={this.columns}
            data={this.tableData}
            striped
            size={'small'}
            class={styles.table}
          />
          <div class={styles.pagination}>
            <NPagination
              v-model:page={this.page}
              v-model:page-size={this.pageSize}
              page-count={this.totalPage}
              show-size-picker
              page-sizes={[10, 30, 50]}
              show-quick-jumper
              onUpdatePage={this.requestData}
              onUpdatePageSize={this.handleChangePageSize}
            />
          </div>
        </Card>
        <TimingModal
          type={'update'}
          v-model:row={this.row}
          v-model:show={this.showRef}
          onUpdateList={this.handleUpdateList}
        />
      </div>
    )
  }
})
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,717 | [Bug][UI Next][V1.0.0-Alpha] Regularly manage multilingual switching issues. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![image](https://user-images.githubusercontent.com/19239641/156922308-f9813322-ac4e-44a7-8a83-0572645a8218.png)
### What you expected to happen
Regularly manage multilingual switching issues.
### How to reproduce
After switching the language, the header also changes with the language.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8717 | https://github.com/apache/dolphinscheduler/pull/8718 | 698c795d4f3412e175fb28e569bf34c6d8085f0b | 9c162c86c3fad159839e0e58b58a20c2bd0abcce | "2022-03-06T12:09:40Z" | java | "2022-03-06T13:36:11Z" | dolphinscheduler-ui-next/src/views/projects/workflow/definition/timing/use-table.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { h, ref, reactive } from 'vue'
import { useI18n } from 'vue-i18n'
import { useRouter } from 'vue-router'
import {
NSpace,
NTooltip,
NButton,
NPopconfirm,
NEllipsis,
NIcon
} from 'naive-ui'
import {
deleteScheduleById,
offline,
online,
queryScheduleListPaging
} from '@/service/modules/schedules'
import {
ArrowDownOutlined,
ArrowUpOutlined,
DeleteOutlined,
EditOutlined
} from '@vicons/antd'
import type { Router } from 'vue-router'
import type { TableColumns } from 'naive-ui/es/data-table/src/interface'
import { ISearchParam } from './types'
import styles from '../index.module.scss'
/**
 * Composable providing the column definitions, reactive table state and
 * data-fetching logic for the workflow-definition timing (cron schedule) page.
 *
 * Returns `{ variables, getTableData }`:
 *  - variables: reactive state (columns, rows, paging, modal visibility)
 *  - getTableData: fetches one page of schedules for the current definition
 */
export function useTable() {
  const { t } = useI18n()
  const router: Router = useRouter()
  const columns: TableColumns<any> = [
    {
      // Sequential row number within the current page, not the schedule id.
      title: '#',
      key: 'id',
      width: 50,
      render: (_row, index) => index + 1
    },
    {
      title: t('project.workflow.workflow_name'),
      key: 'processDefinitionName',
      width: 200,
      // Long workflow names are ellipsized to keep the column width fixed.
      render: (_row) =>
        h(
          NEllipsis,
          { style: 'max-width: 200px' },
          {
            default: () => _row.processDefinitionName
          }
        )
    },
    {
      title: t('project.workflow.start_time'),
      key: 'startTime'
    },
    {
      title: t('project.workflow.end_time'),
      key: 'endTime'
    },
    {
      title: t('project.workflow.crontab'),
      key: 'crontab'
    },
    {
      title: t('project.workflow.failure_strategy'),
      key: 'failureStrategy'
    },
    {
      title: t('project.workflow.status'),
      key: 'releaseState',
      render: (_row) =>
        _row.releaseState === 'ONLINE'
          ? t('project.workflow.up_line')
          : t('project.workflow.down_line')
    },
    {
      title: t('project.workflow.create_time'),
      key: 'createTime'
    },
    {
      title: t('project.workflow.update_time'),
      key: 'updateTime'
    },
    {
      // Per-row action cell: edit, online/offline toggle, delete.
      // Every button is wrapped in NTooltip for a hover hint (issue #8710).
      title: t('project.workflow.operation'),
      key: 'operation',
      fixed: 'right',
      className: styles.operation,
      render: (row) => {
        return h(NSpace, null, {
          default: () => [
            // Edit button: disabled while the schedule is online.
            h(
              NTooltip,
              {},
              {
                trigger: () =>
                  h(
                    NButton,
                    {
                      circle: true,
                      type: 'info',
                      size: 'small',
                      disabled: row.releaseState === 'ONLINE',
                      onClick: () => {
                        handleEdit(row)
                      }
                    },
                    {
                      icon: () => h(EditOutlined)
                    }
                  ),
                default: () => t('project.workflow.edit')
              }
            ),
            // Online/offline toggle: icon, color and tooltip follow the state.
            h(
              NTooltip,
              {},
              {
                trigger: () =>
                  h(
                    NButton,
                    {
                      circle: true,
                      type: row.releaseState === 'ONLINE' ? 'error' : 'warning',
                      size: 'small',
                      onClick: () => {
                        handleReleaseState(row)
                      }
                    },
                    {
                      icon: () =>
                        h(
                          row.releaseState === 'ONLINE'
                            ? ArrowDownOutlined
                            : ArrowUpOutlined
                        )
                    }
                  ),
                default: () =>
                  row.releaseState === 'ONLINE'
                    ? t('project.workflow.down_line')
                    : t('project.workflow.up_line')
              }
            ),
            // Delete button: confirmation popover wrapping a tooltip-ed button.
            h(
              NPopconfirm,
              {
                onPositiveClick: () => {
                  handleDelete(row.id)
                }
              },
              {
                trigger: () =>
                  h(
                    NTooltip,
                    {},
                    {
                      trigger: () =>
                        h(
                          NButton,
                          {
                            circle: true,
                            type: 'error',
                            size: 'small'
                          },
                          {
                            icon: () => h(DeleteOutlined)
                          }
                        ),
                      default: () => t('project.workflow.delete')
                    }
                  ),
                default: () => t('project.workflow.delete_confirm')
              }
            )
          ]
        })
      }
    }
  ]
  // Open the timing modal pre-filled with the selected schedule row.
  const handleEdit = (row: any) => {
    variables.showRef = true
    variables.row = row
  }
  const variables = reactive({
    columns,
    row: {},
    tableData: [],
    projectCode: ref(Number(router.currentRoute.value.params.projectCode)),
    page: ref(1),
    pageSize: ref(10),
    searchVal: ref(),
    totalPage: ref(1),
    showRef: ref(false)
  })
  // Fetch one page of schedules for the workflow definition in the route.
  const getTableData = (params: ISearchParam) => {
    const definitionCode = Number(
      router.currentRoute.value.params.definitionCode
    )
    queryScheduleListPaging(
      { ...params, processDefinitionCode: definitionCode },
      variables.projectCode
    ).then((res: any) => {
      variables.totalPage = res.totalPage
      variables.tableData = res.totalList.map((item: any) => {
        return { ...item }
      })
    })
  }
  // Toggle a schedule online/offline, then refresh the table.
  const handleReleaseState = (row: any) => {
    let handle = online
    if (row.releaseState === 'ONLINE') {
      handle = offline
    }
    handle(variables.projectCode, row.id).then(() => {
      window.$message.success(t('project.workflow.success'))
      getTableData({
        pageSize: variables.pageSize,
        pageNo: variables.page,
        searchVal: variables.searchVal
      })
    })
  }
  const handleDelete = (id: number) => {
    /* after deleting data from the current page, you need to jump forward when the page is empty. */
    if (variables.tableData.length === 1 && variables.page > 1) {
      variables.page -= 1
    }
    deleteScheduleById(id, variables.projectCode)
      .then(() => {
        window.$message.success(t('project.workflow.success'))
        getTableData({
          pageSize: variables.pageSize,
          pageNo: variables.page,
          searchVal: variables.searchVal
        })
      })
      .catch((error: any) => {
        window.$message.error(error.message)
      })
  }
  return {
    variables,
    getTableData
  }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,720 | [Bug][UI Next][V1.0.0-Alpha] Workflow instance table action button is too small to click. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![image](https://user-images.githubusercontent.com/19239641/156956044-82335648-458f-4af0-bcf6-be4f5cbfb713.png)
### What you expected to happen
Workflow instance table action button is too small to click.
### How to reproduce
Make the table button larger.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8720 | https://github.com/apache/dolphinscheduler/pull/8721 | b0fc6e7a695bd6a20092dc2baf1bacf7e2caba30 | ac18b195ec2c5e2792ab7e4da416a3740745d5b4 | "2022-03-07T02:16:12Z" | java | "2022-03-07T03:11:23Z" | dolphinscheduler-ui-next/src/views/projects/workflow/instance/components/table-action.tsx | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { defineComponent, PropType, toRefs } from 'vue'
import { NSpace, NTooltip, NButton, NIcon, NPopconfirm } from 'naive-ui'
import {
DeleteOutlined,
FormOutlined,
InfoCircleFilled,
SyncOutlined,
CloseOutlined,
CloseCircleOutlined,
PauseCircleOutlined,
ControlOutlined,
PlayCircleOutlined
} from '@vicons/antd'
import { useI18n } from 'vue-i18n'
import { useRouter } from 'vue-router'
import type { Router } from 'vue-router'
import { IWorkflowInstance } from '@/service/modules/process-instances/types'
// Props for the per-row action cell: the workflow-instance record acted upon.
const props = {
  row: {
    type: Object as PropType<IWorkflowInstance>,
    required: true
  }
}
export default defineComponent({
name: 'TableAction',
props,
emits: [
'updateList',
'reRun',
'reStore',
'stop',
'suspend',
'deleteInstance'
],
setup(props, ctx) {
const router: Router = useRouter()
const handleEdit = () => {
router.push({
name: 'workflow-instance-detail',
params: { id: props.row!.id },
query: { code: props.row!.processDefinitionCode }
})
}
const handleGantt = () => {
router.push({
name: 'workflow-instance-gantt',
params: { id: props.row!.id },
query: { code: props.row!.processDefinitionCode }
})
}
const handleReRun = () => {
ctx.emit('reRun')
}
const handleReStore = () => {
ctx.emit('reStore')
}
const handleStop = () => {
ctx.emit('stop')
}
const handleSuspend = () => {
ctx.emit('suspend')
}
const handleDeleteInstance = () => {
ctx.emit('deleteInstance')
}
return {
handleEdit,
handleReRun,
handleReStore,
handleStop,
handleSuspend,
handleDeleteInstance,
handleGantt,
...toRefs(props)
}
},
render() {
const { t } = useI18n()
const state = this.row?.state
return (
<NSpace>
<NTooltip trigger={'hover'}>
{{
default: () => t('project.workflow.edit'),
trigger: () => (
<NButton
tag='div'
size='tiny'
type='info'
circle
disabled={
(state !== 'SUCCESS' &&
state !== 'PAUSE' &&
state !== 'FAILURE' &&
state !== 'STOP') ||
this.row?.disabled
}
onClick={this.handleEdit}
>
<NIcon>
<FormOutlined />
</NIcon>
</NButton>
)
}}
</NTooltip>
<NTooltip trigger={'hover'}>
{{
default: () => t('project.workflow.rerun'),
trigger: () => {
return (
<NButton
tag='div'
size='tiny'
type='info'
circle
onClick={this.handleReRun}
disabled={
(state !== 'SUCCESS' &&
state !== 'PAUSE' &&
state !== 'FAILURE' &&
state !== 'STOP') ||
this.row?.disabled
}
>
{this.row?.buttonType === 'run' ? (
<span>{this.row?.count}</span>
) : (
<NIcon>
<SyncOutlined />
</NIcon>
)}
</NButton>
)
}
}}
</NTooltip>
<NTooltip trigger={'hover'}>
{{
default: () => t('project.workflow.recovery_failed'),
trigger: () => (
<NButton
tag='div'
size='tiny'
type='primary'
circle
onClick={this.handleReStore}
disabled={state !== 'FAILURE' || this.row?.disabled}
>
{this.row?.buttonType === 'store' ? (
<span>{this.row?.count}</span>
) : (
<NIcon>
<CloseCircleOutlined />
</NIcon>
)}
</NButton>
)
}}
</NTooltip>
<NTooltip trigger={'hover'}>
{{
default: () =>
state === 'PAUSE'
? t('project.workflow.recovery_failed')
: t('project.workflow.stop'),
trigger: () => (
<NButton
tag='div'
size='tiny'
type='error'
circle
onClick={this.handleStop}
disabled={
(state !== 'RUNNING_EXECUTION' && state !== 'PAUSE') ||
this.row?.disabled
}
>
<NIcon>
{state === 'STOP' ? (
<PlayCircleOutlined />
) : (
<CloseOutlined />
)}
</NIcon>
</NButton>
)
}}
</NTooltip>
<NTooltip trigger={'hover'}>
{{
default: () =>
state === 'PAUSE'
? t('project.workflow.recovery_suspend')
: t('project.workflow.pause'),
trigger: () => (
<NButton
tag='div'
size='tiny'
type='warning'
circle
disabled={
(state !== 'RUNNING_EXECUTION' && state !== 'PAUSE') ||
this.row?.disabled
}
onClick={this.handleSuspend}
>
<NIcon>
{state === 'PAUSE' ? (
<PlayCircleOutlined />
) : (
<PauseCircleOutlined />
)}
</NIcon>
</NButton>
)
}}
</NTooltip>
<NTooltip trigger={'hover'}>
{{
default: () => t('project.workflow.delete'),
trigger: () => (
<NButton
tag='div'
size='tiny'
type='error'
circle
disabled={
(state !== 'SUCCESS' &&
state !== 'FAILURE' &&
state !== 'STOP' &&
state !== 'PAUSE') ||
this.row?.disabled
}
>
<NPopconfirm onPositiveClick={this.handleDeleteInstance}>
{{
default: () => t('project.workflow.delete_confirm'),
icon: () => (
<NIcon>
<InfoCircleFilled />
</NIcon>
),
trigger: () => (
<NIcon>
<DeleteOutlined />
</NIcon>
)
}}
</NPopconfirm>
</NButton>
)
}}
</NTooltip>
<NTooltip trigger={'hover'}>
{{
default: () => t('project.workflow.gantt'),
trigger: () => (
<NButton
tag='div'
size='tiny'
type='info'
circle
disabled={this.row?.disabled}
onClick={this.handleGantt}
>
<NIcon>
<ControlOutlined />
</NIcon>
</NButton>
)
}}
</NTooltip>
</NSpace>
)
}
})
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,720 | [Bug][UI Next][V1.0.0-Alpha] Workflow instance table action button is too small to click. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![image](https://user-images.githubusercontent.com/19239641/156956044-82335648-458f-4af0-bcf6-be4f5cbfb713.png)
### What you expected to happen
Workflow instance table action button is too small to click.
### How to reproduce
Make the table button larger.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8720 | https://github.com/apache/dolphinscheduler/pull/8721 | b0fc6e7a695bd6a20092dc2baf1bacf7e2caba30 | ac18b195ec2c5e2792ab7e4da416a3740745d5b4 | "2022-03-07T02:16:12Z" | java | "2022-03-07T03:11:23Z" | dolphinscheduler-ui-next/src/views/projects/workflow/instance/use-table.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import _ from 'lodash'
import { format } from 'date-fns'
import { reactive, h, ref } from 'vue'
import { useI18n } from 'vue-i18n'
import { useRouter } from 'vue-router'
import type { Router } from 'vue-router'
import { NTooltip, NIcon, NSpin } from 'naive-ui'
import { RowKey } from 'naive-ui/lib/data-table/src/interface'
import {
queryProcessInstanceListPaging,
deleteProcessInstanceById,
batchDeleteProcessInstanceByIds
} from '@/service/modules/process-instances'
import { execute } from '@/service/modules/executors'
import TableAction from './components/table-action'
import { runningType, tasksState } from '@/utils/common'
import { IWorkflowInstance } from '@/service/modules/process-instances/types'
import { ICountDownParam } from './types'
import { ExecuteReq } from '@/service/modules/executors/types'
import { parseTime } from '@/utils/common'
import styles from './index.module.scss'
/**
 * Composable for the workflow-instance list page: reactive table state,
 * column construction (createColumns), paging/fetch (getTableData), and the
 * execute/delete actions wired into the per-row TableAction component.
 */
export function useTable() {
  const { t } = useI18n()
  const router: Router = useRouter()
  // Map of execution state -> { icon, color, desc, isSpin } used by the
  // status column renderer.
  const taskStateIcon = tasksState(t)
  const variables = reactive({
    columns: [],
    checkedRowKeys: [] as Array<RowKey>,
    tableData: [] as Array<IWorkflowInstance>,
    page: ref(1),
    pageSize: ref(10),
    totalPage: ref(1),
    searchVal: ref(),
    executorName: ref(),
    host: ref(),
    stateType: ref(),
    startDate: ref(),
    endDate: ref(),
    projectCode: ref(Number(router.currentRoute.value.params.projectCode))
  })
  // Builds the column list into variables.columns. Called by the page
  // component; the parameter shadows the outer `variables` on purpose.
  const createColumns = (variables: any) => {
    variables.columns = [
      {
        type: 'selection'
      },
      {
        // Sequential row number within the current page.
        title: '#',
        key: 'id',
        width: 50,
        render: (rowData: any, rowIndex: number) => rowIndex + 1
      },
      {
        // Instance name rendered as a link to the instance detail view.
        title: t('project.workflow.workflow_name'),
        key: 'name',
        width: 200,
        render: (_row: IWorkflowInstance) =>
          h(
            'a',
            {
              href: 'javascript:',
              class: styles.links,
              onClick: () =>
                router.push({
                  name: 'workflow-instance-detail',
                  params: { id: _row.id },
                  query: { code: _row.processDefinitionCode }
                })
            },
            {
              default: () => {
                return _row.name
              }
            }
          )
      },
      {
        // Status column: state icon inside a tooltip; running states spin.
        title: t('project.workflow.status'),
        key: 'state',
        render: (_row: IWorkflowInstance) => {
          const stateIcon = taskStateIcon[_row.state]
          const iconElement = h(
            NIcon,
            {
              size: '18px',
              style: 'position: relative; top: 7.5px; left: 7.5px'
            },
            {
              default: () =>
                h(stateIcon.icon, {
                  color: stateIcon.color
                })
            }
          )
          return h(
            NTooltip,
            {},
            {
              trigger: () => {
                if (stateIcon.isSpin) {
                  return h(
                    NSpin,
                    {
                      // NOTE(review): NSpin's size prop is `size`; the key
                      // `small` looks like a typo for `size: 'small'` — confirm
                      // against the Naive UI NSpin API.
                      small: 'small'
                    },
                    {
                      icon: () => iconElement
                    }
                  )
                } else {
                  return iconElement
                }
              },
              default: () => stateIcon!.desc
            }
          )
        }
      },
      {
        // Human-readable description of the command type that started the run.
        title: t('project.workflow.run_type'),
        key: 'commandType',
        render: (_row: IWorkflowInstance) =>
          (
            _.filter(runningType(t), (v) => v.code === _row.commandType)[0] ||
            {}
          ).desc
      },
      {
        title: t('project.workflow.scheduling_time'),
        key: 'scheduleTime',
        render: (_row: IWorkflowInstance) =>
          _row.scheduleTime
            ? format(parseTime(_row.scheduleTime), 'yyyy-MM-dd HH:mm:ss')
            : '-'
      },
      {
        title: t('project.workflow.start_time'),
        key: 'startTime',
        render: (_row: IWorkflowInstance) =>
          _row.startTime
            ? format(parseTime(_row.startTime), 'yyyy-MM-dd HH:mm:ss')
            : '-'
      },
      {
        title: t('project.workflow.end_time'),
        key: 'endTime',
        render: (_row: IWorkflowInstance) =>
          _row.endTime
            ? format(parseTime(_row.endTime), 'yyyy-MM-dd HH:mm:ss')
            : '-'
      },
      {
        title: t('project.workflow.duration'),
        key: 'duration',
        render: (_row: IWorkflowInstance) => _row.duration || '-'
      },
      {
        title: t('project.workflow.run_times'),
        key: 'runTimes'
      },
      {
        title: t('project.workflow.fault_tolerant_sign'),
        key: 'recovery'
      },
      {
        title: t('project.workflow.dry_run_flag'),
        key: 'dryRun',
        render: (_row: IWorkflowInstance) => (_row.dryRun === 1 ? 'YES' : 'NO')
      },
      {
        title: t('project.workflow.executor'),
        key: 'executorName'
      },
      {
        title: t('project.workflow.host'),
        key: 'host'
      },
      {
        // Action cell: delegates rendering to TableAction; state-changing
        // actions either run immediately or go through the refresh countdown.
        title: t('project.workflow.operation'),
        key: 'operation',
        width: 220,
        fixed: 'right',
        className: styles.operation,
        render: (_row: IWorkflowInstance, index: number) =>
          h(TableAction, {
            row: _row,
            onReRun: () =>
              _countDownFn({
                index,
                processInstanceId: _row.id,
                executeType: 'REPEAT_RUNNING',
                buttonType: 'run'
              }),
            onReStore: () =>
              _countDownFn({
                index,
                processInstanceId: _row.id,
                executeType: 'START_FAILURE_TASK_PROCESS',
                buttonType: 'store'
              }),
            onStop: () => {
              // A stopped instance is recovered; a running one is stopped.
              if (_row.state === 'STOP') {
                _countDownFn({
                  index,
                  processInstanceId: _row.id,
                  executeType: 'RECOVER_SUSPENDED_PROCESS',
                  buttonType: 'suspend'
                })
              } else {
                _upExecutorsState({
                  processInstanceId: _row.id,
                  executeType: 'STOP'
                })
              }
            },
            onSuspend: () => {
              // A paused instance is recovered; a running one is paused.
              if (_row.state === 'PAUSE') {
                _countDownFn({
                  index,
                  processInstanceId: _row.id,
                  executeType: 'RECOVER_SUSPENDED_PROCESS',
                  buttonType: 'suspend'
                })
              } else {
                _upExecutorsState({
                  processInstanceId: _row.id,
                  executeType: 'PAUSE'
                })
              }
            },
            onDeleteInstance: () => deleteInstance(_row.id)
          })
      }
    ]
  }
  // Fetch one page of workflow instances using the current filter state.
  const getTableData = () => {
    const params = {
      pageNo: variables.page,
      pageSize: variables.pageSize,
      searchVal: variables.searchVal,
      executorName: variables.executorName,
      host: variables.host,
      stateType: variables.stateType,
      startDate: variables.startDate,
      endDate: variables.endDate
    }
    queryProcessInstanceListPaging({ ...params }, variables.projectCode).then(
      (res: any) => {
        variables.totalPage = res.totalPage
        variables.tableData = res.totalList.map((item: any) => {
          return { ...item }
        })
      }
    )
  }
  // Delete one instance; step back a page if this emptied the current page.
  const deleteInstance = (id: number) => {
    deleteProcessInstanceById(id, variables.projectCode)
      .then(() => {
        window.$message.success(t('project.workflow.success'))
        if (variables.tableData.length === 1 && variables.page > 1) {
          variables.page -= 1
        }
        getTableData()
      })
      .catch((error: any) => {
        window.$message.error(error.message || '')
        getTableData()
      })
  }
  // Delete every checked instance in one request, then clear the selection.
  const batchDeleteInstance = () => {
    const data = {
      processInstanceIds: _.join(variables.checkedRowKeys, ',')
    }
    batchDeleteProcessInstanceByIds(data, variables.projectCode)
      .then(() => {
        window.$message.success(t('project.workflow.success'))
        if (
          variables.tableData.length === variables.checkedRowKeys.length &&
          variables.page > 1
        ) {
          variables.page -= 1
        }
        variables.checkedRowKeys = []
        getTableData()
      })
      .catch((error: any) => {
        window.$message.error(error.message || '')
        getTableData()
      })
  }
  /**
   * Issue an execute command (STOP / PAUSE) immediately and refresh the table.
   */
  const _upExecutorsState = (param: ExecuteReq) => {
    execute(param, variables.projectCode)
      .then(() => {
        window.$message.success(t('project.workflow.success'))
        getTableData()
      })
      .catch((error: any) => {
        window.$message.error(error.message || '')
        getTableData()
      })
  }
  /**
   * Countdown: tick the row's `count` field once per second from 10 to 0,
   * then invoke `fn` (the table refresh) and clear the interval.
   */
  const _countDown = (fn: any, index: number) => {
    const TIME_COUNT = 10
    let timer: number | undefined
    let $count: number
    if (!timer) {
      $count = TIME_COUNT
      timer = setInterval(() => {
        if ($count > 0 && $count <= TIME_COUNT) {
          $count--
          variables.tableData[index].count = $count
        } else {
          fn()
          clearInterval(timer)
          timer = undefined
        }
      }, 1000)
    }
  }
  /**
   * Fire an execute command (rerun / recover), disable the row's buttons and
   * start the countdown before refreshing the table.
   */
  const _countDownFn = (param: ICountDownParam) => {
    const { index } = param
    variables.tableData[index].buttonType = param.buttonType
    execute(param, variables.projectCode)
      .then(() => {
        variables.tableData[index].disabled = true
        window.$message.success(t('project.workflow.success'))
        _countDown(() => {
          getTableData()
        }, index)
      })
      .catch((error: any) => {
        window.$message.error(error.message)
        getTableData()
      })
  }
  return {
    variables,
    createColumns,
    getTableData,
    batchDeleteInstance
  }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,722 | [bug][UI Next][V1.0.0-Alpha] An error page | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
An error page
<img width="1235" alt="image" src="https://user-images.githubusercontent.com/76080484/156958639-10e7e382-491a-4587-9183-4d2750c65854.png">
<img width="1123" alt="image" src="https://user-images.githubusercontent.com/76080484/156958683-59f9d8dc-60fc-4612-8c80-fcfc0629c2b1.png">
### What you expected to happen
Normal use
### How to reproduce
Open the page
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8722 | https://github.com/apache/dolphinscheduler/pull/8731 | 5c640789c3dacac3fee3555ad601ac09d6bee099 | 63e85f314d2a60e6e43480c8ef9897adf64a899f | "2022-03-07T02:43:09Z" | java | "2022-03-07T07:43:59Z" | dolphinscheduler-ui-next/src/locales/index.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { createI18n } from 'vue-i18n'
import zh_CN from './modules/zh_CN'
import en_US from './modules/en_US'
const i18n = createI18n({
globalInjection: true,
locale: 'zh_CN',
messages: {
zh_CN,
en_US
}
})
export default i18n
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,690 | [Bug][UI Next][V1.0.0-Alpha] Workflow execution error | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
Workflow execution error
<img width="1571" alt="image" src="https://user-images.githubusercontent.com/76080484/156691383-e0cdcb28-94cf-41a2-bfad-8e5587c99dab.png">
If this "execType" field is not false, the request will fail; otherwise it will succeed.
<img width="1533" alt="image" src="https://user-images.githubusercontent.com/76080484/156691502-16627d54-1110-42da-88d9-6ba15c095f76.png">
path:/executors/start-process-instance
<img width="997" alt="image" src="https://user-images.githubusercontent.com/76080484/156691724-8089dfe4-af2b-4d4c-8b11-b914a2c21f3e.png">
### What you expected to happen
normal operation
### How to reproduce
Click Run workflow
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8690 | https://github.com/apache/dolphinscheduler/pull/8734 | c51f2e4a7cdfca19cc0477871c9a589b684354f9 | e34f6fc807900c60cdc9bb5dff96701a94e8d17c | "2022-03-04T03:16:24Z" | java | "2022-03-07T09:04:37Z" | dolphinscheduler-ui-next/src/views/projects/workflow/definition/components/start-modal.tsx | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { defineComponent, PropType, toRefs, h, onMounted, ref } from 'vue'
import { useI18n } from 'vue-i18n'
import Modal from '@/components/modal'
import { useForm } from './use-form'
import { useModal } from './use-modal'
import {
NForm,
NFormItem,
NButton,
NIcon,
NInput,
NSpace,
NRadio,
NRadioGroup,
NSelect,
NSwitch,
NCheckbox,
NDatePicker
} from 'naive-ui'
import {
ArrowDownOutlined,
ArrowUpOutlined,
DeleteOutlined,
PlusCircleOutlined
} from '@vicons/antd'
import { IDefinitionData } from '../types'
import styles from '../index.module.scss'
/**
 * Props accepted by the start-workflow modal.
 * - row:  the workflow-definition record the modal operates on.
 * - show: controls the modal's visibility (used via v-model:show).
 */
const props = {
  row: {
    type: Object as PropType<IDefinitionData>,
    // Use a factory for the default: Vue requires object/array prop
    // defaults to be returned from a function, otherwise one shared
    // literal would be reused (and mutated) across component instances.
    default: () => ({} as IDefinitionData)
  },
  show: {
    type: Boolean as PropType<boolean>,
    default: false
  }
}
export default defineComponent({
name: 'workflowDefinitionStart',
props,
emits: ['update:show', 'update:row', 'updateList'],
setup(props, ctx) {
const parallelismRef = ref(false)
const { t } = useI18n()
const { startState } = useForm()
const {
variables,
handleStartDefinition,
getWorkerGroups,
getAlertGroups,
getEnvironmentList
} = useModal(startState, ctx)
const hideModal = () => {
ctx.emit('update:show')
}
const handleStart = () => {
handleStartDefinition(props.row.code)
}
const generalWarningTypeListOptions = () => [
{
value: 'NONE',
label: t('project.workflow.none_send')
},
{
value: 'SUCCESS',
label: t('project.workflow.success_send')
},
{
value: 'FAILURE',
label: t('project.workflow.failure_send')
},
{
value: 'ALL',
label: t('project.workflow.all_send')
}
]
const generalPriorityList = () => [
{
value: 'HIGHEST',
label: 'HIGHEST',
color: '#ff0000',
icon: ArrowUpOutlined
},
{
value: 'HIGH',
label: 'HIGH',
color: '#ff0000',
icon: ArrowUpOutlined
},
{
value: 'MEDIUM',
label: 'MEDIUM',
color: '#EA7D24',
icon: ArrowUpOutlined
},
{
value: 'LOW',
label: 'LOW',
color: '#2A8734',
icon: ArrowDownOutlined
},
{
value: 'LOWEST',
label: 'LOWEST',
color: '#2A8734',
icon: ArrowDownOutlined
}
]
const renderLabel = (option: any) => {
return [
h(
NIcon,
{
style: {
verticalAlign: 'middle',
marginRight: '4px',
marginBottom: '3px'
},
color: option.color
},
{
default: () => h(option.icon)
}
),
option.label
]
}
const updateWorkerGroup = () => {
startState.startForm.environmentCode = null
}
const addStartParams = () => {
variables.startParamsList.push({
prop: '',
value: ''
})
}
const updateParamsList = (index: number, param: Array<string>) => {
variables.startParamsList[index].prop = param[0]
variables.startParamsList[index].value = param[1]
}
const removeStartParams = (index: number) => {
variables.startParamsList.splice(index, 1)
}
onMounted(() => {
getWorkerGroups()
getAlertGroups()
getEnvironmentList()
})
return {
t,
parallelismRef,
hideModal,
handleStart,
generalWarningTypeListOptions,
generalPriorityList,
renderLabel,
updateWorkerGroup,
removeStartParams,
addStartParams,
updateParamsList,
...toRefs(variables),
...toRefs(startState),
...toRefs(props)
}
},
render() {
const { t } = this
return (
<Modal
show={this.show}
title={t('project.workflow.set_parameters_before_starting')}
onCancel={this.hideModal}
onConfirm={this.handleStart}
>
<NForm ref='startFormRef' label-placement='left' label-width='160'>
<NFormItem
label={t('project.workflow.workflow_name')}
path='workflow_name'
>
{this.row.name}
</NFormItem>
<NFormItem
label={t('project.workflow.failure_strategy')}
path='failureStrategy'
>
<NRadioGroup v-model:value={this.startForm.failureStrategy}>
<NSpace>
<NRadio value='CONTINUE'>
{t('project.workflow.continue')}
</NRadio>
<NRadio value='END'>{t('project.workflow.end')}</NRadio>
</NSpace>
</NRadioGroup>
</NFormItem>
<NFormItem
label={t('project.workflow.notification_strategy')}
path='warningType'
>
<NSelect
options={this.generalWarningTypeListOptions()}
v-model:value={this.startForm.warningType}
/>
</NFormItem>
<NFormItem
label={t('project.workflow.workflow_priority')}
path='processInstancePriority'
>
<NSelect
options={this.generalPriorityList()}
renderLabel={this.renderLabel}
v-model:value={this.startForm.processInstancePriority}
/>
</NFormItem>
<NFormItem
label={t('project.workflow.worker_group')}
path='workerGroup'
>
<NSelect
options={this.workerGroups}
onUpdateValue={this.updateWorkerGroup}
v-model:value={this.startForm.workerGroup}
/>
</NFormItem>
<NFormItem
label={t('project.workflow.environment_name')}
path='environmentCode'
>
<NSelect
options={this.environmentList.filter((item: any) =>
item.workerGroups?.includes(this.startForm.workerGroup)
)}
v-model:value={this.startForm.environmentCode}
clearable
/>
</NFormItem>
<NFormItem
label={t('project.workflow.alarm_group')}
path='warningGroupId'
>
<NSelect
options={this.alertGroups}
placeholder={t('project.workflow.please_choose')}
v-model:value={this.startForm.warningGroupId}
clearable
/>
</NFormItem>
<NFormItem
label={t('project.workflow.complement_data')}
path='complement_data'
>
<NCheckbox
checkedValue={'COMPLEMENT_DATA'}
uncheckedValue={undefined}
v-model:checked={this.startForm.execType}
>
{t('project.workflow.whether_complement_data')}
</NCheckbox>
</NFormItem>
{this.startForm.execType && (
<NSpace>
<NFormItem
label={t('project.workflow.mode_of_execution')}
path='runMode'
>
<NRadioGroup v-model:value={this.startForm.runMode}>
<NSpace>
<NRadio value={'RUN_MODE_SERIAL'}>
{t('project.workflow.serial_execution')}
</NRadio>
<NRadio value={'RUN_MODE_PARALLEL'}>
{t('project.workflow.parallel_execution')}
</NRadio>
</NSpace>
</NRadioGroup>
</NFormItem>
{this.startForm.runMode === 'RUN_MODE_PARALLEL' && (
<NFormItem
label={t('project.workflow.parallelism')}
path='expectedParallelismNumber'
>
<NCheckbox v-model:checked={this.parallelismRef}>
{t('project.workflow.custom_parallelism')}
</NCheckbox>
<NInput
disabled={!this.parallelismRef}
placeholder={t('project.workflow.please_enter_parallelism')}
v-model:value={this.startForm.expectedParallelismNumber}
/>
</NFormItem>
)}
<NFormItem
label={t('project.workflow.schedule_date')}
path='startEndTime'
>
<NDatePicker
type='datetimerange'
clearable
v-model:value={this.startForm.startEndTime}
/>
</NFormItem>
</NSpace>
)}
<NFormItem
label={t('project.workflow.startup_parameter')}
path='startup_parameter'
>
{this.startParamsList.length === 0 ? (
<NButton text type='primary' onClick={this.addStartParams}>
<NIcon>
<PlusCircleOutlined />
</NIcon>
</NButton>
) : (
<NSpace vertical>
{this.startParamsList.map((item, index) => (
<NSpace class={styles.startup} key={index}>
<NInput
pair
separator=':'
placeholder={['prop', 'value']}
onUpdateValue={(param) =>
this.updateParamsList(index, param)
}
/>
<NButton
text
type='error'
onClick={() => this.removeStartParams(index)}
>
<NIcon>
<DeleteOutlined />
</NIcon>
</NButton>
<NButton text type='primary' onClick={this.addStartParams}>
<NIcon>
<PlusCircleOutlined />
</NIcon>
</NButton>
</NSpace>
))}
</NSpace>
)}
</NFormItem>
<NFormItem
label={t('project.workflow.whether_dry_run')}
path='dryRun'
>
<NSwitch
checkedValue={1}
uncheckedValue={0}
v-model:value={this.startForm.dryRun}
/>
</NFormItem>
</NForm>
</Modal>
)
}
})
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,690 | [Bug][UI Next][V1.0.0-Alpha] Workflow execution error | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
Workflow execution error
<img width="1571" alt="image" src="https://user-images.githubusercontent.com/76080484/156691383-e0cdcb28-94cf-41a2-bfad-8e5587c99dab.png">
If this ” execType“ field is not false, it will fail, otherwise it will succeed
<img width="1533" alt="image" src="https://user-images.githubusercontent.com/76080484/156691502-16627d54-1110-42da-88d9-6ba15c095f76.png">
path:/executors/start-process-instance
<img width="997" alt="image" src="https://user-images.githubusercontent.com/76080484/156691724-8089dfe4-af2b-4d4c-8b11-b914a2c21f3e.png">
### What you expected to happen
normal operation
### How to reproduce
Click Run workflow
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8690 | https://github.com/apache/dolphinscheduler/pull/8734 | c51f2e4a7cdfca19cc0477871c9a589b684354f9 | e34f6fc807900c60cdc9bb5dff96701a94e8d17c | "2022-03-04T03:16:24Z" | java | "2022-03-07T09:04:37Z" | dolphinscheduler-ui-next/src/views/projects/workflow/definition/components/use-form.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { reactive, ref } from 'vue'
import { useI18n } from 'vue-i18n'
import type { FormRules } from 'naive-ui'
/**
 * Composable holding the reactive form state for the workflow-definition
 * dialogs: import, start, and timing (schedule).
 * Returns { importState, startState, timingState }.
 */
export const useForm = () => {
  const { t } = useI18n()

  // "Today" components, used to prefill the date-range pickers below.
  const date = new Date()
  const year = date.getFullYear()
  const month = date.getMonth()
  const day = date.getDate()

  // State backing the "import workflow" dialog.
  const importState = reactive({
    importFormRef: ref(),
    importForm: {
      name: '',
      file: ''
    },
    importRules: {
      file: {
        required: true,
        trigger: ['input', 'blur'],
        validator() {
          // NOTE(review): this rule is keyed on `file` yet validates `name`.
          // `name` appears to be filled from the chosen file, so this likely
          // checks "a file was picked" — confirm before renaming either key.
          if (importState.importForm.name === '') {
            return new Error(t('project.workflow.enter_name_tips'))
          }
        }
      }
    } as FormRules
  })

  // State backing the "start workflow" dialog.
  const startState = reactive({
    startFormRef: ref(),
    startForm: {
      processDefinitionCode: -1,
      // Complement-data window defaults to [today, today].
      startEndTime: [new Date(year, month, day), new Date(year, month, day)],
      scheduleTime: null,
      failureStrategy: 'CONTINUE',
      warningType: 'NONE',
      warningGroupId: null,
      // NOTE(review): execType is deserialized server-side into a command
      // type enum; an empty string is reported to make
      // /executors/start-process-instance fail (issue #8690). A valid
      // default such as 'START_PROCESS' presumably belongs here — but that
      // change must land together with the start-modal checkbox handling.
      execType: '',
      startNodeList: '',
      taskDependType: 'TASK_POST',
      runMode: 'RUN_MODE_SERIAL',
      processInstancePriority: 'MEDIUM',
      workerGroup: 'default',
      environmentCode: null,
      startParams: null,
      expectedParallelismNumber: '',
      dryRun: 0
    }
  })

  // State backing the "timing" (cron schedule) dialog.
  const timingState = reactive({
    timingFormRef: ref(),
    timingForm: {
      // Schedule window defaults to [today, today + 100 years].
      startEndTime: [
        new Date(year, month, day),
        new Date(year + 100, month, day)
      ],
      crontab: '0 0 * * * ? *',
      // Default to the browser's local timezone.
      timezoneId: Intl.DateTimeFormat().resolvedOptions().timeZone,
      failureStrategy: 'CONTINUE',
      warningType: 'NONE',
      processInstancePriority: 'MEDIUM',
      warningGroupId: '',
      workerGroup: 'default',
      environmentCode: null
    }
  })

  return {
    importState,
    startState,
    timingState
  }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,497 | [Feature][UI Next][V1.0.0-Alpha] Dependent tasks can re-run automatically in the case of complement | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add a select button to control the switch of complement dependent process.
Api:
Add param ```complementDependentMode``` in ```projects/{projectCode}/executors/start-process-instance```
Enum:
1. OFF_MODE (default, not required)
2. ALL_DEPENDENT
### Use case
#8373
### Related issues
#8373
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8497 | https://github.com/apache/dolphinscheduler/pull/8739 | 1d7ee2c5c444b538f3606e0ba4b22d64f0c2686d | aa5392529bb8d2ba7b4b73a9527adf713f8884c8 | "2022-02-23T04:41:14Z" | java | "2022-03-07T10:05:58Z" | dolphinscheduler-ui-next/src/locales/modules/en_US.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// ---------------------------------------------------------------------------
// English (en_US) locale maps. Keys are referenced from templates via
// vue-i18n (e.g. t('login.userName')); only values may be edited freely —
// renaming a key breaks every call site that references it.
// ---------------------------------------------------------------------------

// Login page strings.
const login = {
  test: 'Test',
  userName: 'Username',
  userName_tips: 'Please enter your username',
  userPassword: 'Password',
  userPassword_tips: 'Please enter your password',
  login: 'Login'
}

// Shared modal buttons.
const modal = {
  cancel: 'Cancel',
  confirm: 'Confirm'
}

// Theme switcher labels.
const theme = {
  light: 'Light',
  dark: 'Dark'
}

// User avatar dropdown entries.
const userDropdown = {
  profile: 'Profile',
  password: 'Password',
  logout: 'Logout'
}

// Sidebar / top navigation entries.
const menu = {
  home: 'Home',
  project: 'Project',
  resources: 'Resources',
  datasource: 'Datasource',
  monitor: 'Monitor',
  security: 'Security',
  project_overview: 'Project Overview',
  workflow_relation: 'Workflow Relation',
  workflow: 'Workflow',
  workflow_definition: 'Workflow Definition',
  workflow_instance: 'Workflow Instance',
  task: 'Task',
  task_instance: 'Task Instance',
  task_definition: 'Task Definition',
  file_manage: 'File Manage',
  udf_manage: 'UDF Manage',
  resource_manage: 'Resource Manage',
  function_manage: 'Function Manage',
  service_manage: 'Service Manage',
  master: 'Master',
  worker: 'Worker',
  db: 'DB',
  statistical_manage: 'Statistical Manage',
  statistics: 'Statistics',
  audit_log: 'Audit Log',
  tenant_manage: 'Tenant Manage',
  user_manage: 'User Manage',
  alarm_group_manage: 'Alarm Group Manage',
  alarm_instance_manage: 'Alarm Instance Manage',
  worker_group_manage: 'Worker Group Manage',
  yarn_queue_manage: 'Yarn Queue Manage',
  environment_manage: 'Environment Manage',
  k8s_namespace_manage: 'K8S Namespace Manage',
  token_manage: 'Token Manage',
  task_group_manage: 'Task Group Manage',
  task_group_option: 'Task Group Option',
  task_group_queue: 'Task Group Queue',
  data_quality: 'Data Quality',
  task_result: 'Task Result',
  rule: 'Rule management'
}

// Home dashboard strings; the *_EXECUTION/etc. values mirror backend
// workflow/task state enum names.
const home = {
  task_state_statistics: 'Task State Statistics',
  process_state_statistics: 'Process State Statistics',
  process_definition_statistics: 'Process Definition Statistics',
  number: 'Number',
  state: 'State',
  submitted_success: 'SUBMITTED_SUCCESS',
  running_execution: 'RUNNING_EXECUTION',
  ready_pause: 'READY_PAUSE',
  pause: 'PAUSE',
  ready_stop: 'READY_STOP',
  stop: 'STOP',
  failure: 'FAILURE',
  success: 'SUCCESS',
  need_fault_tolerance: 'NEED_FAULT_TOLERANCE',
  kill: 'KILL',
  waiting_thread: 'WAITING_THREAD',
  waiting_depend: 'WAITING_DEPEND',
  delay_execution: 'DELAY_EXECUTION',
  forced_success: 'FORCED_SUCCESS',
  serial_wait: 'SERIAL_WAIT'
}

// "Change password" dialog strings.
const password = {
  edit_password: 'Edit Password',
  password: 'Password',
  confirm_password: 'Confirm Password',
  password_tips: 'Please enter your password',
  confirm_password_tips: 'Please enter your confirm password',
  two_password_entries_are_inconsistent:
    'Two password entries are inconsistent',
  submit: 'Submit'
}

// User profile page strings.
const profile = {
  profile: 'Profile',
  edit: 'Edit',
  username: 'Username',
  email: 'Email',
  phone: 'Phone',
  state: 'State',
  permission: 'Permission',
  create_time: 'Create Time',
  update_time: 'Update Time',
  administrator: 'Administrator',
  ordinary_user: 'Ordinary User',
  edit_profile: 'Edit Profile',
  username_tips: 'Please enter your username',
  email_tips: 'Please enter your email',
  email_correct_tips: 'Please enter your email in the correct format',
  phone_tips: 'Please enter your phone',
  state_tips: 'Please choose your state',
  enable: 'Enable',
  disable: 'Disable'
}

// Monitor module strings, grouped by sub-page.
const monitor = {
  master: {
    cpu_usage: 'CPU Usage',
    memory_usage: 'Memory Usage',
    load_average: 'Load Average',
    create_time: 'Create Time',
    last_heartbeat_time: 'Last Heartbeat Time',
    directory_detail: 'Directory Detail',
    host: 'Host',
    directory: 'Directory'
  },
  worker: {
    cpu_usage: 'CPU Usage',
    memory_usage: 'Memory Usage',
    load_average: 'Load Average',
    create_time: 'Create Time',
    last_heartbeat_time: 'Last Heartbeat Time',
    directory_detail: 'Directory Detail',
    host: 'Host',
    directory: 'Directory'
  },
  db: {
    health_state: 'Health State',
    max_connections: 'Max Connections',
    threads_connections: 'Threads Connections',
    threads_running_connections: 'Threads Running Connections'
  },
  statistics: {
    command_number_of_waiting_for_running:
      'Command Number Of Waiting For Running',
    failure_command_number: 'Failure Command Number',
    tasks_number_of_waiting_running: 'Tasks Number Of Waiting Running',
    task_number_of_ready_to_kill: 'Task Number Of Ready To Kill'
  },
  audit_log: {
    user_name: 'User Name',
    resource_type: 'Resource Type',
    project_name: 'Project Name',
    operation_type: 'Operation Type',
    create_time: 'Create Time',
    start_time: 'Start Time',
    end_time: 'End Time',
    user_audit: 'User Audit',
    project_audit: 'Project Audit',
    create: 'Create',
    update: 'Update',
    delete: 'Delete',
    read: 'Read'
  }
}
// Resource module strings (files, UDF resources, UDF functions, task groups).
// Keys are referenced from templates via vue-i18n; do not rename keys —
// note that `enter_udf_unction_name_tips` carries a historical typo in its
// key name that call sites depend on.
const resource = {
  file: {
    file_manage: 'File Manage',
    create_folder: 'Create Folder',
    create_file: 'Create File',
    upload_files: 'Upload Files',
    enter_keyword_tips: 'Please enter keyword',
    name: 'Name',
    user_name: 'Resource userName',
    whether_directory: 'Whether directory',
    file_name: 'File Name',
    description: 'Description',
    size: 'Size',
    update_time: 'Update Time',
    operation: 'Operation',
    edit: 'Edit',
    rename: 'Rename',
    download: 'Download',
    delete: 'Delete',
    yes: 'Yes',
    no: 'No',
    folder_name: 'Folder Name',
    enter_name_tips: 'Please enter name',
    enter_description_tips: 'Please enter description',
    enter_content_tips: 'Please enter the resource content',
    file_format: 'File Format',
    file_content: 'File Content',
    delete_confirm: 'Delete?',
    confirm: 'Confirm',
    cancel: 'Cancel',
    success: 'Success',
    file_details: 'File Details',
    return: 'Return',
    save: 'Save'
  },
  udf: {
    udf_resources: 'UDF resources',
    create_folder: 'Create Folder',
    upload_udf_resources: 'Upload UDF Resources',
    udf_source_name: 'UDF Resource Name',
    whether_directory: 'Whether directory',
    file_name: 'File Name',
    file_size: 'File Size',
    description: 'Description',
    create_time: 'Create Time',
    update_time: 'Update Time',
    operation: 'Operation',
    yes: 'Yes',
    no: 'No',
    edit: 'Edit',
    download: 'Download',
    delete: 'Delete',
    delete_confirm: 'Delete?',
    success: 'Success',
    folder_name: 'Folder Name',
    upload: 'Upload',
    upload_files: 'Upload Files',
    file_upload: 'File Upload',
    enter_keyword_tips: 'Please enter keyword',
    enter_name_tips: 'Please enter name',
    enter_description_tips: 'Please enter description'
  },
  function: {
    udf_function: 'UDF Function',
    create_udf_function: 'Create UDF Function',
    // Fixed copy-paste bug: the edit dialog previously showed the
    // "Create UDF Function" title.
    edit_udf_function: 'Edit UDF Function',
    udf_function_name: 'UDF Function Name',
    class_name: 'Class Name',
    type: 'Type',
    description: 'Description',
    jar_package: 'Jar Package',
    update_time: 'Update Time',
    operation: 'Operation',
    rename: 'Rename',
    edit: 'Edit',
    delete: 'Delete',
    success: 'Success',
    package_name: 'Package Name',
    udf_resources: 'UDF Resources',
    instructions: 'Instructions',
    upload_resources: 'Upload Resources',
    udf_resources_directory: 'UDF resources directory',
    delete_confirm: 'Delete?',
    enter_keyword_tips: 'Please enter keyword',
    enter_udf_unction_name_tips: 'Please enter a UDF function name',
    enter_package_name_tips: 'Please enter a Package name',
    enter_select_udf_resources_tips: 'Please select UDF resources',
    enter_select_udf_resources_directory_tips:
      'Please select UDF resources directory',
    enter_instructions_tips: 'Please enter a instructions',
    enter_name_tips: 'Please enter name',
    enter_description_tips: 'Please enter description'
  },
  task_group_option: {
    manage: 'Task group manage',
    option: 'Task group option',
    create: 'Create task group',
    edit: 'Edit task group',
    delete: 'Delete task group',
    view_queue: 'View the queue of the task group',
    switch_status: 'Switch status',
    code: 'Task group code',
    name: 'Task group name',
    project_name: 'Project name',
    resource_pool_size: 'Resource pool size',
    resource_pool_size_be_a_number:
      'The size of the task group resource pool should be more than 1',
    resource_used_pool_size: 'Used resource',
    desc: 'Task group desc',
    status: 'Task group status',
    enable_status: 'Enable',
    disable_status: 'Disable',
    please_enter_name: 'Please enter task group name',
    please_enter_desc: 'Please enter task group description',
    please_enter_resource_pool_size:
      'Please enter task group resource pool size',
    please_select_project: 'Please select a project',
    create_time: 'Create time',
    update_time: 'Update time',
    actions: 'Actions',
    please_enter_keywords: 'Please enter keywords'
  },
  task_group_queue: {
    actions: 'Actions',
    task_name: 'Task name',
    task_group_name: 'Task group name',
    project_name: 'Project name',
    process_name: 'Process name',
    process_instance_name: 'Process instance',
    queue: 'Task group queue',
    priority: 'Priority',
    priority_be_a_number:
      'The priority of the task group queue should be a positive number',
    force_starting_status: 'Starting status',
    in_queue: 'In queue',
    task_status: 'Task status',
    view: 'View task group queue',
    the_status_of_waiting: 'Waiting into the queue',
    the_status_of_queuing: 'Queuing',
    the_status_of_releasing: 'Released',
    modify_priority: 'Edit the priority',
    start_task: 'Start the task',
    priority_not_empty: 'The value of priority can not be empty',
    priority_must_be_number: 'The value of priority should be number',
    please_select_task_name: 'Please select a task name',
    create_time: 'Create time',
    update_time: 'Update time',
    edit_priority: 'Edit the task priority'
  }
}
// Project module locale messages (en_US): project list, workflow
// definition/instance pages, task pages, the DAG editor and the
// task-node form. Keys are referenced elsewhere via vue-i18n lookups
// (e.g. t('project.workflow.online')) and must not be renamed.
const project = {
  // Project list / CRUD page.
  list: {
    create_project: 'Create Project',
    edit_project: 'Edit Project',
    project_list: 'Project List',
    project_tips: 'Please enter your project',
    description_tips: 'Please enter your description',
    username_tips: 'Please enter your username',
    project_name: 'Project Name',
    project_description: 'Project Description',
    owned_users: 'Owned Users',
    workflow_define_count: 'Workflow Define Count',
    process_instance_running_count: 'Process Instance Running Count',
    description: 'Description',
    create_time: 'Create Time',
    update_time: 'Update Time',
    operation: 'Operation',
    edit: 'Edit',
    delete: 'Delete',
    confirm: 'Confirm',
    cancel: 'Cancel',
    delete_confirm: 'Delete?'
  },
  // Workflow definition / instance pages, start & timing dialogs.
  workflow: {
    workflow_relation: 'Workflow Relation',
    create_workflow: 'Create Workflow',
    import_workflow: 'Import Workflow',
    workflow_name: 'Workflow Name',
    current_selection: 'Current Selection',
    online: 'Online',
    offline: 'Offline',
    refresh: 'Refresh',
    show_hide_label: 'Show / Hide Label',
    workflow_offline: 'Workflow Offline',
    schedule_offline: 'Schedule Offline',
    schedule_start_time: 'Schedule Start Time',
    schedule_end_time: 'Schedule End Time',
    crontab_expression: 'Crontab',
    workflow_publish_status: 'Workflow Publish Status',
    schedule_publish_status: 'Schedule Publish Status',
    workflow_definition: 'Workflow Definition',
    workflow_instance: 'Workflow Instance',
    status: 'Status',
    create_time: 'Create Time',
    update_time: 'Update Time',
    description: 'Description',
    create_user: 'Create User',
    modify_user: 'Modify User',
    operation: 'Operation',
    edit: 'Edit',
    start: 'Start',
    timing: 'Timing',
    timezone: 'Timezone',
    up_line: 'Online',
    down_line: 'Offline',
    copy_workflow: 'Copy Workflow',
    cron_manage: 'Cron manage',
    delete: 'Delete',
    tree_view: 'Tree View',
    tree_limit: 'Limit Size',
    export: 'Export',
    version_info: 'Version Info',
    version: 'Version',
    file_upload: 'File Upload',
    upload_file: 'Upload File',
    upload: 'Upload',
    file_name: 'File Name',
    success: 'Success',
    set_parameters_before_starting: 'Please set the parameters before starting',
    set_parameters_before_timing: 'Set parameters before timing',
    start_and_stop_time: 'Start and stop time',
    next_five_execution_times: 'Next five execution times',
    execute_time: 'Execute time',
    failure_strategy: 'Failure Strategy',
    notification_strategy: 'Notification Strategy',
    workflow_priority: 'Workflow Priority',
    worker_group: 'Worker Group',
    environment_name: 'Environment Name',
    alarm_group: 'Alarm Group',
    complement_data: 'Complement Data',
    startup_parameter: 'Startup Parameter',
    whether_dry_run: 'Whether Dry-Run',
    continue: 'Continue',
    end: 'End',
    none_send: 'None',
    success_send: 'Success',
    failure_send: 'Failure',
    all_send: 'All',
    whether_complement_data: 'Whether it is a complement process?',
    schedule_date: 'Schedule date',
    mode_of_execution: 'Mode of execution',
    serial_execution: 'Serial execution',
    parallel_execution: 'Parallel execution',
    parallelism: 'Parallelism',
    custom_parallelism: 'Custom Parallelism',
    please_enter_parallelism: 'Please enter Parallelism',
    please_choose: 'Please Choose',
    start_time: 'Start Time',
    end_time: 'End Time',
    crontab: 'Crontab',
    delete_confirm: 'Delete?',
    enter_name_tips: 'Please enter name',
    switch_version: 'Switch To This Version',
    confirm_switch_version: 'Confirm Switch To This Version?',
    current_version: 'Current Version',
    run_type: 'Run Type',
    scheduling_time: 'Scheduling Time',
    duration: 'Duration',
    run_times: 'Run Times',
    fault_tolerant_sign: 'Fault-tolerant Sign',
    dry_run_flag: 'Dry-run Flag',
    executor: 'Executor',
    host: 'Host',
    start_process: 'Start Process',
    execute_from_the_current_node: 'Execute from the current node',
    recover_tolerance_fault_process: 'Recover tolerance fault process',
    resume_the_suspension_process: 'Resume the suspension process',
    execute_from_the_failed_nodes: 'Execute from the failed nodes',
    scheduling_execution: 'Scheduling execution',
    rerun: 'Rerun',
    stop: 'Stop',
    pause: 'Pause',
    recovery_waiting_thread: 'Recovery waiting thread',
    recover_serial_wait: 'Recover serial wait',
    recovery_suspend: 'Recovery Suspend',
    recovery_failed: 'Recovery Failed',
    gantt: 'Gantt',
    name: 'Name',
    all_status: 'AllStatus',
    submit_success: 'Submitted successfully',
    running: 'Running',
    ready_to_pause: 'Ready to pause',
    ready_to_stop: 'Ready to stop',
    failed: 'Failed',
    need_fault_tolerance: 'Need fault tolerance',
    kill: 'Kill',
    waiting_for_thread: 'Waiting for thread',
    waiting_for_dependence: 'Waiting for dependence',
    waiting_for_dependency_to_complete: 'Waiting for dependency to complete',
    delay_execution: 'Delay execution',
    forced_success: 'Forced success',
    serial_wait: 'Serial wait',
    executing: 'Executing',
    startup_type: 'Startup Type',
    complement_range: 'Complement Range',
    parameters_variables: 'Parameters variables',
    global_parameters: 'Global parameters',
    local_parameters: 'Local parameters',
    type: 'Type',
    retry_count: 'Retry Count',
    submit_time: 'Submit Time',
    refresh_status_succeeded: 'Refresh status succeeded',
    view_log: 'View log',
    update_log_success: 'Update log success',
    no_more_log: 'No more logs',
    no_log: 'No log',
    loading_log: 'Loading Log...',
    close: 'Close',
    download_log: 'Download Log',
    refresh_log: 'Refresh Log',
    enter_full_screen: 'Enter full screen',
    cancel_full_screen: 'Cancel full screen',
    task_state: 'Task status'
  },
  // Task definition / instance pages.
  task: {
    task_name: 'Task Name',
    task_type: 'Task Type',
    create_task: 'Create Task',
    workflow_instance: 'Workflow Instance',
    workflow_name: 'Workflow Name',
    workflow_name_tips: 'Please select workflow name',
    workflow_state: 'Workflow State',
    version: 'Version',
    current_version: 'Current Version',
    switch_version: 'Switch To This Version',
    confirm_switch_version: 'Confirm Switch To This Version?',
    description: 'Description',
    move: 'Move',
    upstream_tasks: 'Upstream Tasks',
    executor: 'Executor',
    node_type: 'Node Type',
    state: 'State',
    submit_time: 'Submit Time',
    start_time: 'Start Time',
    create_time: 'Create Time',
    update_time: 'Update Time',
    end_time: 'End Time',
    duration: 'Duration',
    retry_count: 'Retry Count',
    dry_run_flag: 'Dry Run Flag',
    host: 'Host',
    operation: 'Operation',
    edit: 'Edit',
    delete: 'Delete',
    delete_confirm: 'Delete?',
    submitted_success: 'Submitted Success',
    running_execution: 'Running Execution',
    ready_pause: 'Ready Pause',
    pause: 'Pause',
    ready_stop: 'Ready Stop',
    stop: 'Stop',
    failure: 'Failure',
    success: 'Success',
    need_fault_tolerance: 'Need Fault Tolerance',
    kill: 'Kill',
    waiting_thread: 'Waiting Thread',
    waiting_depend: 'Waiting Depend',
    delay_execution: 'Delay Execution',
    forced_success: 'Forced Success',
    view_log: 'View Log',
    download_log: 'Download Log'
  },
  // DAG canvas / toolbar.
  dag: {
    create: 'Create Workflow',
    search: 'Search',
    download_png: 'Download PNG',
    fullscreen_open: 'Open Fullscreen',
    fullscreen_close: 'Close Fullscreen',
    save: 'Save',
    close: 'Close',
    format: 'Format',
    refresh_dag_status: 'Refresh DAG status',
    layout_type: 'Layout Type',
    grid_layout: 'Grid',
    dagre_layout: 'Dagre',
    rows: 'Rows',
    cols: 'Cols',
    copy_success: 'Copy Success',
    workflow_name: 'Workflow Name',
    description: 'Description',
    tenant: 'Tenant',
    timeout_alert: 'Timeout Alert',
    global_variables: 'Global Variables',
    basic_info: 'Basic Information',
    minute: 'Minute',
    key: 'Key',
    value: 'Value',
    success: 'Success',
    delete_cell: 'Delete selected edges and nodes',
    online_directly: 'Whether to go online the process definition',
    dag_name_empty: 'DAG graph name cannot be empty',
    positive_integer: 'Please enter a positive integer greater than 0',
    prop_empty: 'prop is empty',
    prop_repeat: 'prop is repeat',
    node_not_created: 'Failed to save node not created',
    copy_name: 'Copy Name',
    view_variables: 'View Variables',
    startup_parameter: 'Startup Parameter'
  },
  // Task-node form: shared fields plus per-task-type fields
  // (Spark/Flink/HTTP/SQL/Sqoop/DataX/dependent/data-quality...).
  node: {
    current_node_settings: 'Current node settings',
    instructions: 'Instructions',
    view_history: 'View history',
    view_log: 'View log',
    enter_this_child_node: 'Enter this child node',
    name: 'Node name',
    name_tips: 'Please enter name (required)',
    task_type: 'Task Type',
    task_type_tips: 'Please select a task type (required)',
    process_name: 'Process Name',
    process_name_tips: 'Please select a process (required)',
    child_node: 'Child Node',
    enter_child_node: 'Enter child node',
    run_flag: 'Run flag',
    normal: 'Normal',
    prohibition_execution: 'Prohibition execution',
    description: 'Description',
    description_tips: 'Please enter description',
    task_priority: 'Task priority',
    worker_group: 'Worker group',
    worker_group_tips:
      'The Worker group no longer exists, please select the correct Worker group!',
    environment_name: 'Environment Name',
    task_group_name: 'Task group name',
    task_group_queue_priority: 'Priority',
    number_of_failed_retries: 'Number of failed retries',
    times: 'Times',
    failed_retry_interval: 'Failed retry interval',
    minute: 'Minute',
    delay_execution_time: 'Delay execution time',
    state: 'State',
    branch_flow: 'Branch flow',
    cancel: 'Cancel',
    loading: 'Loading...',
    confirm: 'Confirm',
    success: 'Success',
    failed: 'Failed',
    backfill_tips:
      'The newly created sub-Process has not yet been executed and cannot enter the sub-Process',
    task_instance_tips:
      'The task has not been executed and cannot enter the sub-Process',
    branch_tips:
      'Cannot select the same node for successful branch flow and failed branch flow',
    timeout_alarm: 'Timeout alarm',
    timeout_strategy: 'Timeout strategy',
    timeout_strategy_tips: 'Timeout strategy must be selected',
    timeout_failure: 'Timeout failure',
    timeout_period: 'Timeout period',
    timeout_period_tips: 'Timeout must be a positive integer',
    script: 'Script',
    script_tips: 'Please enter script(required)',
    resources: 'Resources',
    resources_tips: 'Please select resources',
    non_resources_tips: 'Please delete all non-existent resources',
    useless_resources_tips: 'Unauthorized or deleted resources',
    custom_parameters: 'Custom Parameters',
    copy_success: 'Copy success',
    copy_failed: 'The browser does not support automatic copying',
    prop_tips: 'prop(required)',
    prop_repeat: 'prop is repeat',
    value_tips: 'value(optional)',
    value_required_tips: 'value(required)',
    pre_tasks: 'Pre tasks',
    program_type: 'Program Type',
    spark_version: 'Spark Version',
    main_class: 'Main Class',
    main_class_tips: 'Please enter main class',
    main_package: 'Main Package',
    main_package_tips: 'Please enter main package',
    deploy_mode: 'Deploy Mode',
    app_name: 'App Name',
    app_name_tips: 'Please enter app name(optional)',
    driver_cores: 'Driver Cores',
    driver_cores_tips: 'Please enter Driver cores',
    driver_memory: 'Driver Memory',
    driver_memory_tips: 'Please enter Driver memory',
    executor_number: 'Executor Number',
    executor_number_tips: 'Please enter Executor number',
    executor_memory: 'Executor Memory',
    executor_memory_tips: 'Please enter Executor memory',
    executor_cores: 'Executor Cores',
    executor_cores_tips: 'Please enter Executor cores',
    main_arguments: 'Main Arguments',
    main_arguments_tips: 'Please enter main arguments',
    option_parameters: 'Option Parameters',
    option_parameters_tips: 'Please enter option parameters',
    positive_integer_tips: 'should be a positive integer',
    flink_version: 'Flink Version',
    job_manager_memory: 'JobManager Memory',
    job_manager_memory_tips: 'Please enter JobManager memory',
    task_manager_memory: 'TaskManager Memory',
    task_manager_memory_tips: 'Please enter TaskManager memory',
    slot_number: 'Slot Number',
    slot_number_tips: 'Please enter Slot number',
    parallelism: 'Parallelism',
    custom_parallelism: 'Configure parallelism',
    parallelism_tips: 'Please enter Parallelism',
    parallelism_number_tips: 'Parallelism number should be positive integer',
    parallelism_complement_tips:
      'If there are a large number of tasks requiring complement, you can use the custom parallelism to ' +
      'set the complement task thread to a reasonable value to avoid too large impact on the server.',
    task_manager_number: 'TaskManager Number',
    task_manager_number_tips: 'Please enter TaskManager number',
    http_url: 'Http Url',
    http_url_tips: 'Please Enter Http Url',
    http_method: 'Http Method',
    http_parameters: 'Http Parameters',
    http_check_condition: 'Http Check Condition',
    http_condition: 'Http Condition',
    http_condition_tips: 'Please Enter Http Condition',
    timeout_settings: 'Timeout Settings',
    connect_timeout: 'Connect Timeout',
    ms: 'ms',
    socket_timeout: 'Socket Timeout',
    status_code_default: 'Default response code 200',
    status_code_custom: 'Custom response code',
    body_contains: 'Content includes',
    body_not_contains: 'Content does not contain',
    http_parameters_position: 'Http Parameters Position',
    target_task_name: 'Target Task Name',
    target_task_name_tips: 'Please enter the Pigeon task name',
    datasource_type: 'Datasource types',
    datasource_instances: 'Datasource instances',
    sql_type: 'SQL Type',
    sql_type_query: 'Query',
    sql_type_non_query: 'Non Query',
    sql_statement: 'SQL Statement',
    pre_sql_statement: 'Pre SQL Statement',
    post_sql_statement: 'Post SQL Statement',
    sql_input_placeholder: 'Please enter non-query sql.',
    sql_empty_tips: 'The sql can not be empty.',
    procedure_method: 'SQL Statement',
    procedure_method_tips: 'Please enter the procedure script',
    procedure_method_snippet:
      '--Please enter the procedure script \n\n--call procedure:call <procedure-name>[(<arg1>,<arg2>, ...)]\n\n--call function:?= call <procedure-name>[(<arg1>,<arg2>, ...)]',
    start: 'Start',
    edit: 'Edit',
    copy: 'Copy',
    delete: 'Delete',
    custom_job: 'Custom Job',
    custom_script: 'Custom Script',
    sqoop_job_name: 'Job Name',
    sqoop_job_name_tips: 'Please enter Job Name(required)',
    direct: 'Direct',
    hadoop_custom_params: 'Hadoop Params',
    sqoop_advanced_parameters: 'Sqoop Advanced Parameters',
    data_source: 'Data Source',
    type: 'Type',
    datasource: 'Datasource',
    datasource_tips: 'Please select the datasource',
    model_type: 'ModelType',
    form: 'Form',
    table: 'Table',
    table_tips: 'Please enter Mysql Table(required)',
    column_type: 'ColumnType',
    all_columns: 'All Columns',
    some_columns: 'Some Columns',
    column: 'Column',
    column_tips: 'Please enter Columns (Comma separated)',
    database: 'Database',
    database_tips: 'Please enter Hive Database(required)',
    hive_table_tips: 'Please enter Hive Table(required)',
    hive_partition_keys: 'Hive partition Keys',
    hive_partition_keys_tips: 'Please enter Hive Partition Keys',
    hive_partition_values: 'Hive partition Values',
    hive_partition_values_tips: 'Please enter Hive Partition Values',
    export_dir: 'Export Dir',
    export_dir_tips: 'Please enter Export Dir(required)',
    sql_statement_tips: 'SQL Statement(required)',
    map_column_hive: 'Map Column Hive',
    map_column_java: 'Map Column Java',
    data_target: 'Data Target',
    create_hive_table: 'CreateHiveTable',
    drop_delimiter: 'DropDelimiter',
    over_write_src: 'OverWriteSrc',
    hive_target_dir: 'Hive Target Dir',
    hive_target_dir_tips: 'Please enter hive target dir',
    replace_delimiter: 'ReplaceDelimiter',
    replace_delimiter_tips: 'Please enter Replace Delimiter',
    target_dir: 'Target Dir',
    target_dir_tips: 'Please enter Target Dir(required)',
    delete_target_dir: 'DeleteTargetDir',
    compression_codec: 'CompressionCodec',
    file_type: 'FileType',
    fields_terminated: 'FieldsTerminated',
    fields_terminated_tips: 'Please enter Fields Terminated',
    lines_terminated: 'LinesTerminated',
    lines_terminated_tips: 'Please enter Lines Terminated',
    is_update: 'IsUpdate',
    update_key: 'UpdateKey',
    update_key_tips: 'Please enter Update Key',
    update_mode: 'UpdateMode',
    only_update: 'OnlyUpdate',
    allow_insert: 'AllowInsert',
    concurrency: 'Concurrency',
    concurrency_tips: 'Please enter Concurrency',
    sea_tunnel_master: 'Master',
    sea_tunnel_master_url: 'Master URL',
    sea_tunnel_queue: 'Queue',
    sea_tunnel_master_url_tips:
      'Please enter the master url, e.g., 127.0.0.1:7077',
    switch_condition: 'Condition',
    switch_branch_flow: 'Branch Flow',
    and: 'and',
    or: 'or',
    datax_custom_template: 'Custom Template Switch',
    datax_json_template: 'JSON',
    datax_target_datasource_type: 'Target Datasource Type',
    datax_target_database: 'Target Database',
    datax_target_table: 'Target Table',
    datax_target_table_tips: 'Please enter the name of the target table',
    datax_target_database_pre_sql: 'Pre SQL Statement',
    datax_target_database_post_sql: 'Post SQL Statement',
    datax_non_query_sql_tips: 'Please enter the non-query sql statement',
    datax_job_speed_byte: 'Speed(Byte count)',
    datax_job_speed_byte_info: '(0 means unlimited)',
    datax_job_speed_record: 'Speed(Record count)',
    datax_job_speed_record_info: '(0 means unlimited)',
    datax_job_runtime_memory: 'Runtime Memory Limits',
    datax_job_runtime_memory_xms: 'Low Limit Value',
    datax_job_runtime_memory_xmx: 'High Limit Value',
    datax_job_runtime_memory_unit: 'G',
    current_hour: 'CurrentHour',
    last_1_hour: 'Last1Hour',
    last_2_hour: 'Last2Hours',
    last_3_hour: 'Last3Hours',
    last_24_hour: 'Last24Hours',
    today: 'today',
    last_1_days: 'Last1Days',
    last_2_days: 'Last2Days',
    last_3_days: 'Last3Days',
    last_7_days: 'Last7Days',
    this_week: 'ThisWeek',
    last_week: 'LastWeek',
    last_monday: 'LastMonday',
    last_tuesday: 'LastTuesday',
    last_wednesday: 'LastWednesday',
    last_thursday: 'LastThursday',
    last_friday: 'LastFriday',
    last_saturday: 'LastSaturday',
    last_sunday: 'LastSunday',
    this_month: 'ThisMonth',
    last_month: 'LastMonth',
    last_month_begin: 'LastMonthBegin',
    last_month_end: 'LastMonthEnd',
    month: 'month',
    week: 'week',
    day: 'day',
    hour: 'hour',
    add_dependency: 'Add dependency',
    waiting_dependent_start: 'Waiting Dependent start',
    check_interval: 'Check interval',
    waiting_dependent_complete: 'Waiting Dependent complete',
    rule_name: 'Rule Name',
    null_check: 'NullCheck',
    custom_sql: 'CustomSql',
    multi_table_accuracy: 'MulTableAccuracy',
    multi_table_value_comparison: 'MulTableCompare',
    field_length_check: 'FieldLengthCheck',
    uniqueness_check: 'UniquenessCheck',
    regexp_check: 'RegexpCheck',
    timeliness_check: 'TimelinessCheck',
    enumeration_check: 'EnumerationCheck',
    table_count_check: 'TableCountCheck',
    src_connector_type: 'SrcConnType',
    src_datasource_id: 'SrcSource',
    src_table: 'SrcTable',
    src_filter: 'SrcFilter',
    src_field: 'SrcField',
    statistics_name: 'ActualValName',
    check_type: 'CheckType',
    operator: 'Operator',
    threshold: 'Threshold',
    failure_strategy: 'FailureStrategy',
    target_connector_type: 'TargetConnType',
    target_datasource_id: 'TargetSourceId',
    target_table: 'TargetTable',
    target_filter: 'TargetFilter',
    mapping_columns: 'OnClause',
    statistics_execute_sql: 'ActualValExecSql',
    // Fixed typo: 'Excepted' -> 'Expected' (display labels only; the
    // i18n keys are kept as-is because other modules look them up).
    comparison_name: 'ExpectedValName',
    comparison_execute_sql: 'ExpectedValExecSql',
    comparison_type: 'ExpectedValType',
    writer_connector_type: 'WriterConnType',
    writer_datasource_id: 'WriterSourceId',
    target_field: 'TargetField',
    field_length: 'FieldLength',
    logic_operator: 'LogicOperator',
    regexp_pattern: 'RegexpPattern',
    deadline: 'Deadline',
    datetime_format: 'DatetimeFormat',
    enum_list: 'EnumList',
    begin_time: 'BeginTime',
    fix_value: 'FixValue',
    required: 'required'
  }
}
// Security module locale messages (en_US). Pure data: one sub-object
// per Security Center page; keys are resolved by vue-i18n elsewhere,
// so neither keys nor values may be renamed casually.
const security = {
  // Tenant management page.
  tenant: {
    tenant_manage: 'Tenant Manage',
    create_tenant: 'Create Tenant',
    search_tips: 'Please enter keywords',
    tenant_code: 'Operating System Tenant',
    description: 'Description',
    queue_name: 'QueueName',
    create_time: 'Create Time',
    update_time: 'Update Time',
    actions: 'Operation',
    edit_tenant: 'Edit Tenant',
    tenant_code_tips: 'Please enter the operating system tenant',
    queue_name_tips: 'Please select queue',
    description_tips: 'Please enter a description',
    delete_confirm: 'Delete?',
    edit: 'Edit',
    delete: 'Delete'
  },
  // Alarm group management page.
  alarm_group: {
    create_alarm_group: 'Create Alarm Group',
    edit_alarm_group: 'Edit Alarm Group',
    search_tips: 'Please enter keywords',
    alert_group_name_tips: 'Please enter your alert group name',
    alarm_plugin_instance: 'Alarm Plugin Instance',
    alarm_plugin_instance_tips: 'Please select alert plugin instance',
    alarm_group_description_tips: 'Please enter your alarm group description',
    alert_group_name: 'Alert Group Name',
    alarm_group_description: 'Alarm Group Description',
    create_time: 'Create Time',
    update_time: 'Update Time',
    operation: 'Operation',
    delete_confirm: 'Delete?',
    edit: 'Edit',
    delete: 'Delete'
  },
  // Worker group management page.
  worker_group: {
    create_worker_group: 'Create Worker Group',
    edit_worker_group: 'Edit Worker Group',
    search_tips: 'Please enter keywords',
    operation: 'Operation',
    delete_confirm: 'Delete?',
    edit: 'Edit',
    delete: 'Delete',
    group_name: 'Group Name',
    group_name_tips: 'Please enter your group name',
    worker_addresses: 'Worker Addresses',
    worker_addresses_tips: 'Please select worker addresses',
    create_time: 'Create Time',
    update_time: 'Update Time'
  },
  // YARN queue management page.
  yarn_queue: {
    create_queue: 'Create Queue',
    edit_queue: 'Edit Queue',
    search_tips: 'Please enter keywords',
    queue_name: 'Queue Name',
    queue_value: 'Queue Value',
    create_time: 'Create Time',
    update_time: 'Update Time',
    operation: 'Operation',
    edit: 'Edit',
    queue_name_tips: 'Please enter your queue name',
    queue_value_tips: 'Please enter your queue value'
  },
  // Environment management page.
  environment: {
    create_environment: 'Create Environment',
    edit_environment: 'Edit Environment',
    search_tips: 'Please enter keywords',
    edit: 'Edit',
    delete: 'Delete',
    environment_name: 'Environment Name',
    environment_config: 'Environment Config',
    environment_desc: 'Environment Desc',
    worker_groups: 'Worker Groups',
    create_time: 'Create Time',
    update_time: 'Update Time',
    operation: 'Operation',
    delete_confirm: 'Delete?',
    environment_name_tips: 'Please enter your environment name',
    environment_config_tips: 'Please enter your environment config',
    environment_description_tips: 'Please enter your environment description',
    worker_group_tips: 'Please select worker group'
  },
  // Access-token management page.
  token: {
    create_token: 'Create Token',
    edit_token: 'Edit Token',
    search_tips: 'Please enter keywords',
    user: 'User',
    user_tips: 'Please select user',
    token: 'Token',
    token_tips: 'Please enter your token',
    expiration_time: 'Expiration Time',
    expiration_time_tips: 'Please select expiration time',
    create_time: 'Create Time',
    update_time: 'Update Time',
    operation: 'Operation',
    edit: 'Edit',
    delete: 'Delete',
    delete_confirm: 'Delete?'
  },
  // User management page (includes authorization dialogs).
  user: {
    user_manage: 'User Manage',
    create_user: 'Create User',
    update_user: 'Update User',
    delete_user: 'Delete User',
    delete_confirm: 'Are you sure to delete?',
    delete_confirm_tip:
      'Deleting user is a dangerous operation,please be careful',
    project: 'Project',
    resource: 'Resource',
    file_resource: 'File Resource',
    udf_resource: 'UDF Resource',
    datasource: 'Datasource',
    udf: 'UDF Function',
    authorize_project: 'Project Authorize',
    authorize_resource: 'Resource Authorize',
    authorize_datasource: 'Datasource Authorize',
    authorize_udf: 'UDF Function Authorize',
    username: 'Username',
    username_exists: 'The username already exists',
    username_rule_msg: 'Please enter username',
    user_password: 'Please enter password',
    user_password_rule_msg:
      'Please enter a password containing letters and numbers with a length between 6 and 20',
    user_type: 'User Type',
    tenant_code: 'Tenant',
    tenant_id_rule_msg: 'Please select tenant',
    queue: 'Queue',
    email: 'Email',
    email_rule_msg: 'Please enter valid email',
    phone: 'Phone',
    phone_rule_msg: 'Please enter valid phone number',
    state: 'State',
    state_enabled: 'Enabled',
    state_disabled: 'Disabled',
    create_time: 'Create Time',
    update_time: 'Update Time',
    operation: 'Operation',
    edit: 'Edit',
    delete: 'Delete',
    authorize: 'Authorize',
    save_error_msg: 'Failed to save, please retry',
    delete_error_msg: 'Failed to delete, please retry',
    auth_error_msg: 'Failed to authorize, please retry',
    auth_success_msg: 'Authorize succeeded'
  },
  // Alarm plugin instance page. Mixed-case keys (e.g. WebHook/webHook)
  // mirror the dynamic form-field names returned by alert plugins, so
  // the apparent duplicates are intentional — do not deduplicate.
  alarm_instance: {
    search_input_tips: 'Please input the keywords',
    alarm_instance_manage: 'Alarm instance manage',
    alarm_instance: 'Alarm Instance',
    alarm_instance_name: 'Alarm instance name',
    alarm_instance_name_tips: 'Please enter alarm plugin instance name',
    alarm_plugin_name: 'Alarm plugin name',
    create_time: 'Create Time',
    update_time: 'Update Time',
    operation: 'Operation',
    edit: 'Edit',
    delete: 'Delete',
    confirm: 'Confirm',
    cancel: 'Cancel',
    submit: 'Submit',
    create: 'Create',
    select_plugin: 'Select plugin',
    select_plugin_tips: 'Select Alarm plugin',
    instance_parameter_exception: 'Instance parameter exception',
    WebHook: 'WebHook',
    webHook: 'WebHook',
    IsEnableProxy: 'Enable Proxy',
    Proxy: 'Proxy',
    Port: 'Port',
    User: 'User',
    corpId: 'CorpId',
    secret: 'Secret',
    Secret: 'Secret',
    users: 'Users',
    userSendMsg: 'UserSendMsg',
    agentId: 'AgentId',
    showType: 'Show Type',
    receivers: 'Receivers',
    receiverCcs: 'ReceiverCcs',
    serverHost: 'SMTP Host',
    serverPort: 'SMTP Port',
    sender: 'Sender',
    enableSmtpAuth: 'SMTP Auth',
    Password: 'Password',
    starttlsEnable: 'SMTP STARTTLS Enable',
    sslEnable: 'SMTP SSL Enable',
    smtpSslTrust: 'SMTP SSL Trust',
    url: 'URL',
    requestType: 'Request Type',
    headerParams: 'Headers',
    bodyParams: 'Body',
    contentField: 'Content Field',
    Keyword: 'Keyword',
    userParams: 'User Params',
    path: 'Script Path',
    type: 'Type',
    sendType: 'Send Type',
    username: 'Username',
    botToken: 'Bot Token',
    chatId: 'Channel Chat Id',
    parseMode: 'Parse Mode'
  },
  // Kubernetes namespace management page.
  k8s_namespace: {
    create_namespace: 'Create Namespace',
    edit_namespace: 'Edit Namespace',
    search_tips: 'Please enter keywords',
    k8s_namespace: 'K8S Namespace',
    k8s_namespace_tips: 'Please enter k8s namespace',
    k8s_cluster: 'K8S Cluster',
    k8s_cluster_tips: 'Please enter k8s cluster',
    owner: 'Owner',
    owner_tips: 'Please enter owner',
    tag: 'Tag',
    tag_tips: 'Please enter tag',
    limit_cpu: 'Limit CPU',
    limit_cpu_tips: 'Please enter limit CPU',
    limit_memory: 'Limit Memory',
    limit_memory_tips: 'Please enter limit memory',
    create_time: 'Create Time',
    update_time: 'Update Time',
    operation: 'Operation',
    edit: 'Edit',
    delete: 'Delete',
    delete_confirm: 'Delete?'
  }
}
// Datasource Center locale messages (en_US): list page, create/edit
// form and connection-test dialog. Keys are vue-i18n lookup paths and
// must not be renamed.
const datasource = {
  datasource: 'DataSource',
  create_datasource: 'Create DataSource',
  search_input_tips: 'Please input the keywords',
  datasource_name: 'Datasource Name',
  datasource_name_tips: 'Please enter datasource name',
  datasource_user_name: 'Owner',
  datasource_type: 'Datasource Type',
  datasource_parameter: 'Datasource Parameter',
  description: 'Description',
  description_tips: 'Please enter description',
  create_time: 'Create Time',
  update_time: 'Update Time',
  operation: 'Operation',
  click_to_view: 'Click to view',
  delete: 'Delete',
  confirm: 'Confirm',
  cancel: 'Cancel',
  create: 'Create',
  edit: 'Edit',
  success: 'Success',
  test_connect: 'Test Connect',
  ip: 'IP',
  ip_tips: 'Please enter IP',
  port: 'Port',
  port_tips: 'Please enter port',
  database_name: 'Database Name',
  database_name_tips: 'Please enter database name',
  oracle_connect_type: 'ServiceName or SID',
  oracle_connect_type_tips: 'Please select serviceName or SID',
  oracle_service_name: 'ServiceName',
  oracle_sid: 'SID',
  // Capitalization fixed to match the Title Case used by every other
  // form label in this object (was 'jdbc connect parameters').
  jdbc_connect_parameters: 'JDBC Connect Parameters',
  principal_tips: 'Please enter Principal',
  krb5_conf_tips:
    'Please enter the kerberos authentication parameter java.security.krb5.conf',
  keytab_username_tips:
    'Please enter the kerberos authentication parameter login.user.keytab.username',
  keytab_path_tips:
    'Please enter the kerberos authentication parameter login.user.keytab.path',
  format_tips: 'Please enter format',
  // Capitalization fixed (was 'connection parameter').
  connection_parameter: 'Connection Parameter',
  user_name: 'User Name',
  user_name_tips: 'Please enter your username',
  user_password: 'Password',
  user_password_tips: 'Please enter your password'
}
// Data Quality module locale messages (en_US): task-result page and
// rule-management page. Keys are vue-i18n lookup paths; the misspelled
// 'excepted_*' KEYS are kept (renaming would break lookups elsewhere)
// but the user-facing VALUES are corrected to 'Expected'.
const data_quality = {
  // Data-quality task result table.
  task_result: {
    task_name: 'Task Name',
    workflow_instance: 'Workflow Instance',
    rule_type: 'Rule Type',
    rule_name: 'Rule Name',
    state: 'State',
    actual_value: 'Actual Value',
    excepted_value: 'Expected Value', // typo fix: was 'Excepted Value'
    check_type: 'Check Type',
    operator: 'Operator',
    threshold: 'Threshold',
    failure_strategy: 'Failure Strategy',
    excepted_value_type: 'Expected Value Type', // typo fix: was 'Excepted ...'
    error_output_path: 'Error Output Path',
    username: 'Username',
    create_time: 'Create Time',
    update_time: 'Update Time',
    undone: 'Undone',
    success: 'Success',
    failure: 'Failure',
    single_table: 'Single Table',
    single_table_custom_sql: 'Single Table Custom Sql',
    multi_table_accuracy: 'Multi Table Accuracy',
    multi_table_comparison: 'Multi Table Comparison',
    expected_and_actual_or_expected: '(Expected - Actual) / Expected x 100%',
    expected_and_actual: 'Expected - Actual',
    actual_and_expected: 'Actual - Expected',
    actual_or_expected: 'Actual / Expected x 100%'
  },
  // Data-quality rule management table and rule input items.
  rule: {
    actions: 'Actions',
    name: 'Rule Name',
    type: 'Rule Type',
    username: 'User Name',
    create_time: 'Create Time',
    update_time: 'Update Time',
    input_item: 'Rule input item',
    view_input_item: 'View input items',
    input_item_title: 'Input item title',
    input_item_placeholder: 'Input item placeholder',
    input_item_type: 'Input item type',
    src_connector_type: 'SrcConnType',
    src_datasource_id: 'SrcSource',
    src_table: 'SrcTable',
    src_filter: 'SrcFilter',
    src_field: 'SrcField',
    statistics_name: 'ActualValName',
    check_type: 'CheckType',
    operator: 'Operator',
    threshold: 'Threshold',
    failure_strategy: 'FailureStrategy',
    target_connector_type: 'TargetConnType',
    target_datasource_id: 'TargetSourceId',
    target_table: 'TargetTable',
    target_filter: 'TargetFilter',
    mapping_columns: 'OnClause',
    statistics_execute_sql: 'ActualValExecSql',
    // Typo fixes below: 'ExceptedVal*' -> 'ExpectedVal*' (labels only;
    // keys unchanged so rule-form field bindings keep working).
    comparison_name: 'ExpectedValName',
    comparison_execute_sql: 'ExpectedValExecSql',
    comparison_type: 'ExpectedValType',
    writer_connector_type: 'WriterConnType',
    writer_datasource_id: 'WriterSourceId',
    target_field: 'TargetField',
    field_length: 'FieldLength',
    logic_operator: 'LogicOperator',
    regexp_pattern: 'RegexpPattern',
    deadline: 'Deadline',
    datetime_format: 'DatetimeFormat',
    enum_list: 'EnumList',
    begin_time: 'BeginTime',
    fix_value: 'FixValue',
    null_check: 'NullCheck',
    custom_sql: 'Custom Sql',
    single_table: 'Single Table',
    single_table_custom_sql: 'Single Table Custom Sql',
    multi_table_accuracy: 'Multi Table Accuracy',
    multi_table_value_comparison: 'Multi Table Compare',
    field_length_check: 'FieldLengthCheck',
    uniqueness_check: 'UniquenessCheck',
    regexp_check: 'RegexpCheck',
    timeliness_check: 'TimelinessCheck',
    enumeration_check: 'EnumerationCheck',
    table_count_check: 'TableCountCheck',
    All: 'All',
    FixValue: 'FixValue',
    DailyAvg: 'DailyAvg',
    WeeklyAvg: 'WeeklyAvg',
    MonthlyAvg: 'MonthlyAvg',
    Last7DayAvg: 'Last7DayAvg',
    Last30DayAvg: 'Last30DayAvg',
    SrcTableTotalRows: 'SrcTableTotalRows',
    TargetTableTotalRows: 'TargetTableTotalRows'
  }
}
// Crontab-builder locale messages (en_US), used by the timing dialog's
// visual cron editor (second/minute/hour/day/month/year tabs).
const crontab = {
  second: 'second',
  minute: 'minute',
  hour: 'hour',
  day: 'day',
  month: 'month',
  year: 'year',
  monday: 'Monday',
  tuesday: 'Tuesday',
  wednesday: 'Wednesday',
  thursday: 'Thursday',
  friday: 'Friday',
  saturday: 'Saturday',
  sunday: 'Sunday',
  every_second: 'Every second',
  every: 'Every',
  second_carried_out: 'second carried out',
  second_start: 'Start',
  specific_second: 'Specific second(multiple)',
  specific_second_tip: 'Please enter a specific second',
  cycle_from: 'Cycle from',
  to: 'to',
  every_minute: 'Every minute',
  minute_carried_out: 'minute carried out',
  minute_start: 'Start',
  specific_minute: 'Specific minute(multiple)',
  specific_minute_tip: 'Please enter a specific minute',
  every_hour: 'Every hour',
  hour_carried_out: 'hour carried out',
  hour_start: 'Start',
  specific_hour: 'Specific hour(multiple)',
  specific_hour_tip: 'Please enter a specific hour',
  every_day: 'Every day',
  week_carried_out: 'week carried out',
  start: 'Start',
  day_carried_out: 'day carried out',
  day_start: 'Start',
  specific_week: 'Specific day of the week(multiple)',
  specific_week_tip: 'Please enter a specific week',
  specific_day: 'Specific days(multiple)',
  specific_day_tip: 'Please enter specific days', // grammar fix: was 'a days'
  last_day_of_month: 'On the last day of the month',
  last_work_day_of_month: 'On the last working day of the month',
  last_of_month: 'At the last of this month',
  before_end_of_month: 'Before the end of this month',
  recent_business_day_to_month:
    'The most recent business day (Monday to Friday) to this month',
  in_this_months: 'In this months',
  every_month: 'Every month',
  month_carried_out: 'month carried out',
  month_start: 'Start',
  specific_month: 'Specific months(multiple)',
  specific_month_tip: 'Please enter specific months', // grammar fix: was 'a months'
  every_year: 'Every year',
  year_carried_out: 'year carried out',
  year_start: 'Start',
  specific_year: 'Specific year(multiple)',
  specific_year_tip: 'Please enter a year',
  one_hour: 'hour',
  one_day: 'day'
}
// Aggregate all module message maps into the single locale object
// consumed by vue-i18n's `messages` option for this language.
export default {
  login,
  modal,
  theme,
  userDropdown,
  menu,
  home,
  password,
  profile,
  monitor,
  resource,
  project,
  security,
  datasource,
  data_quality,
  crontab
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,497 | [Feature][UI Next][V1.0.0-Alpha] Dependent tasks can re-run automatically in the case of complement | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add a select button to control the switch of complement dependent process.
Api:
Add param ```complementDependentMode``` in ```projects/{projectCode}/executors/start-process-instance```
Enum:
1. OFF_MODE (default, not required)
2. ALL_DEPENDENT
### Use case
#8373
### Related issues
#8373
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8497 | https://github.com/apache/dolphinscheduler/pull/8739 | 1d7ee2c5c444b538f3606e0ba4b22d64f0c2686d | aa5392529bb8d2ba7b4b73a9527adf713f8884c8 | "2022-02-23T04:41:14Z" | java | "2022-03-07T10:05:58Z" | dolphinscheduler-ui-next/src/locales/modules/zh_CN.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const login = {
test: '测试',
userName: '用户名',
userName_tips: '请输入用户名',
userPassword: '密码',
userPassword_tips: '请输入密码',
login: '登录'
}
const modal = {
cancel: '取消',
confirm: '确定'
}
const theme = {
light: '浅色',
dark: '深色'
}
const userDropdown = {
profile: '用户信息',
password: '密码管理',
logout: '退出登录'
}
const menu = {
home: '首页',
project: '项目管理',
resources: '资源中心',
datasource: '数据源中心',
monitor: '监控中心',
security: '安全中心',
project_overview: '项目概览',
workflow_relation: '工作流关系',
workflow: '工作流',
workflow_definition: '工作流定义',
workflow_instance: '工作流实例',
task: '任务',
task_instance: '任务实例',
task_definition: '任务定义',
file_manage: '文件管理',
udf_manage: 'UDF管理',
resource_manage: '资源管理',
function_manage: '函数管理',
service_manage: '服务管理',
master: 'Master',
worker: 'Worker',
db: 'DB',
statistical_manage: '统计管理',
statistics: 'Statistics',
audit_log: '审计日志',
tenant_manage: '租户管理',
user_manage: '用户管理',
alarm_group_manage: '告警组管理',
alarm_instance_manage: '告警实例管理',
worker_group_manage: 'Worker分组管理',
yarn_queue_manage: 'Yarn队列管理',
environment_manage: '环境管理',
k8s_namespace_manage: 'K8S命名空间管理',
token_manage: '令牌管理',
task_group_manage: '任务组管理',
task_group_option: '任务组配置',
task_group_queue: '任务组队列',
data_quality: '数据质量',
task_result: '任务结果',
rule: '规则管理'
}
const home = {
task_state_statistics: '任务状态统计',
process_state_statistics: '流程状态统计',
process_definition_statistics: '流程定义统计',
number: '数量',
state: '状态',
submitted_success: '提交成功',
running_execution: '正在运行',
ready_pause: '准备暂停',
pause: '暂停',
ready_stop: '准备停止',
stop: '停止',
failure: '失败',
success: '成功',
need_fault_tolerance: '需要容错',
kill: 'KILL',
waiting_thread: '等待线程',
waiting_depend: '等待依赖完成',
delay_execution: '延时执行',
forced_success: '强制成功',
serial_wait: '串行等待'
}
const password = {
edit_password: '修改密码',
password: '密码',
confirm_password: '确认密码',
password_tips: '请输入密码',
confirm_password_tips: '请输入确认密码',
two_password_entries_are_inconsistent: '两次密码输入不一致',
submit: '提交'
}
const profile = {
profile: '用户信息',
edit: '编辑',
username: '用户名',
email: '邮箱',
phone: '手机',
state: '状态',
permission: '权限',
create_time: '创建时间',
update_time: '更新时间',
administrator: '管理员',
ordinary_user: '普通用户',
edit_profile: '编辑用户',
username_tips: '请输入用户名',
email_tips: '请输入邮箱',
email_correct_tips: '请输入正确格式的邮箱',
phone_tips: '请输入手机号',
state_tips: '请选择状态',
enable: '启用',
disable: '禁用'
}
const monitor = {
master: {
cpu_usage: '处理器使用量',
memory_usage: '内存使用量',
load_average: '平均负载量',
create_time: '创建时间',
last_heartbeat_time: '最后心跳时间',
directory_detail: '目录详情',
host: '主机',
directory: '注册目录'
},
worker: {
cpu_usage: '处理器使用量',
memory_usage: '内存使用量',
load_average: '平均负载量',
create_time: '创建时间',
last_heartbeat_time: '最后心跳时间',
directory_detail: '目录详情',
host: '主机',
directory: '注册目录'
},
db: {
health_state: '健康状态',
max_connections: '最大连接数',
threads_connections: '当前连接数',
threads_running_connections: '数据库当前活跃连接数'
},
statistics: {
command_number_of_waiting_for_running: '待执行的命令数',
failure_command_number: '执行失败的命令数',
tasks_number_of_waiting_running: '待运行任务数',
task_number_of_ready_to_kill: '待杀死任务数'
},
audit_log: {
user_name: '用户名称',
resource_type: '资源类型',
project_name: '项目名称',
operation_type: '操作类型',
create_time: '创建时间',
start_time: '开始时间',
end_time: '结束时间',
user_audit: '用户管理审计',
project_audit: '项目管理审计',
create: '创建',
update: '更新',
delete: '删除',
read: '读取'
}
}
const resource = {
file: {
file_manage: '文件管理',
create_folder: '创建文件夹',
create_file: '创建文件',
upload_files: '上传文件',
enter_keyword_tips: '请输入关键词',
name: '名称',
user_name: '所属用户',
whether_directory: '是否文件夹',
file_name: '文件名称',
description: '描述',
size: '大小',
update_time: '更新时间',
operation: '操作',
edit: '编辑',
rename: '重命名',
download: '下载',
delete: '删除',
yes: '是',
no: '否',
folder_name: '文件夹名称',
enter_name_tips: '请输入名称',
enter_description_tips: '请输入描述',
enter_content_tips: '请输入资源内容',
enter_suffix_tips: '请输入文件后缀',
file_format: '文件格式',
file_content: '文件内容',
delete_confirm: '确定删除吗?',
confirm: '确定',
cancel: '取消',
success: '成功',
file_details: '文件详情',
return: '返回',
save: '保存'
},
udf: {
udf_resources: 'UDF资源',
create_folder: '创建文件夹',
upload_udf_resources: '上传UDF资源',
udf_source_name: 'UDF资源名称',
whether_directory: '是否文件夹',
file_name: '文件名称',
file_size: '文件大小',
description: '描述',
create_time: '创建时间',
update_time: '更新时间',
operation: '操作',
yes: '是',
no: '否',
edit: '编辑',
download: '下载',
delete: '删除',
success: '成功',
folder_name: '文件夹名称',
upload: '上传',
upload_files: '上传文件',
file_upload: '文件上传',
delete_confirm: '确定删除吗?',
enter_keyword_tips: '请输入关键词',
enter_name_tips: '请输入名称',
enter_description_tips: '请输入描述'
},
function: {
udf_function: 'UDF函数',
create_udf_function: '创建UDF函数',
edit_udf_function: '编辑UDF函数',
udf_function_name: 'UDF函数名称',
class_name: '类名',
type: '类型',
description: '描述',
jar_package: 'jar包',
update_time: '更新时间',
operation: '操作',
rename: '重命名',
edit: '编辑',
delete: '删除',
success: '成功',
package_name: '包名类名',
udf_resources: 'UDF资源',
instructions: '使用说明',
upload_resources: '上传资源',
udf_resources_directory: 'UDF资源目录',
delete_confirm: '确定删除吗?',
enter_keyword_tips: '请输入关键词',
enter_udf_unction_name_tips: '请输入UDF函数名称',
enter_package_name_tips: '请输入包名类名',
enter_select_udf_resources_tips: '请选择UDF资源',
enter_select_udf_resources_directory_tips: '请选择UDF资源目录',
enter_instructions_tips: '请输入使用说明',
enter_name_tips: '请输入名称',
enter_description_tips: '请输入描述'
},
task_group_option: {
manage: '任务组管理',
option: '任务组配置',
create: '创建任务组',
edit: '编辑任务组',
delete: '删除任务组',
view_queue: '查看任务组队列',
switch_status: '切换任务组状态',
code: '任务组编号',
name: '任务组名称',
project_name: '项目名称',
resource_pool_size: '资源容量',
resource_used_pool_size: '已用资源',
desc: '描述信息',
status: '任务组状态',
enable_status: '启用',
disable_status: '不可用',
please_enter_name: '请输入任务组名称',
please_enter_desc: '请输入任务组描述',
please_enter_resource_pool_size: '请输入资源容量大小',
resource_pool_size_be_a_number: '资源容量大小必须大于等于1的数值',
please_select_project: '请选择项目',
create_time: '创建时间',
update_time: '更新时间',
actions: '操作',
please_enter_keywords: '请输入搜索关键词'
},
task_group_queue: {
actions: '操作',
task_name: '任务名称',
task_group_name: '任务组名称',
project_name: '项目名称',
process_name: '工作流名称',
process_instance_name: '工作流实例',
queue: '任务组队列',
priority: '组内优先级',
priority_be_a_number: '优先级必须是大于等于0的数值',
force_starting_status: '是否强制启动',
in_queue: '是否排队中',
task_status: '任务状态',
view_task_group_queue: '查看任务组队列',
the_status_of_waiting: '等待入队',
the_status_of_queuing: '排队中',
the_status_of_releasing: '已释放',
modify_priority: '修改优先级',
start_task: '强制启动',
priority_not_empty: '优先级不能为空',
priority_must_be_number: '优先级必须是数值',
please_select_task_name: '请选择节点名称',
create_time: '创建时间',
update_time: '更新时间',
edit_priority: '修改优先级'
}
}
const project = {
list: {
create_project: '创建项目',
edit_project: '编辑项目',
project_list: '项目列表',
project_tips: '请输入项目名称',
description_tips: '请输入项目描述',
username_tips: '请输入所属用户',
project_name: '项目名称',
project_description: '项目描述',
owned_users: '所属用户',
workflow_define_count: '工作流定义数',
process_instance_running_count: '正在运行的流程数',
description: '描述',
create_time: '创建时间',
update_time: '更新时间',
operation: '操作',
edit: '编辑',
delete: '删除',
confirm: '确定',
cancel: '取消',
delete_confirm: '确定删除吗?'
},
workflow: {
workflow_relation: '工作流关系',
create_workflow: '创建工作流',
import_workflow: '导入工作流',
workflow_name: '工作流名称',
current_selection: '当前选择',
online: '已上线',
offline: '已下线',
refresh: '刷新',
show_hide_label: '显示 / 隐藏标签',
workflow_offline: '工作流下线',
schedule_offline: '调度下线',
schedule_start_time: '定时开始时间',
schedule_end_time: '定时结束时间',
crontab_expression: 'Crontab',
workflow_publish_status: '工作流上线状态',
schedule_publish_status: '定时状态',
workflow_definition: '工作流定义',
workflow_instance: '工作流实例',
status: '状态',
create_time: '创建时间',
update_time: '更新时间',
description: '描述',
create_user: '创建用户',
modify_user: '修改用户',
operation: '操作',
edit: '编辑',
confirm: '确定',
cancel: '取消',
start: '运行',
timing: '定时',
timezone: '时区',
up_line: '上线',
down_line: '下线',
copy_workflow: '复制工作流',
cron_manage: '定时管理',
delete: '删除',
tree_view: '工作流树形图',
tree_limit: '限制大小',
export: '导出',
version_info: '版本信息',
version: '版本',
file_upload: '文件上传',
upload_file: '上传文件',
upload: '上传',
file_name: '文件名称',
success: '成功',
set_parameters_before_starting: '启动前请先设置参数',
set_parameters_before_timing: '定时前请先设置参数',
start_and_stop_time: '起止时间',
next_five_execution_times: '接下来五次执行时间',
execute_time: '执行时间',
failure_strategy: '失败策略',
notification_strategy: '通知策略',
workflow_priority: '流程优先级',
worker_group: 'Worker分组',
environment_name: '环境名称',
alarm_group: '告警组',
complement_data: '补数',
startup_parameter: '启动参数',
whether_dry_run: '是否空跑',
continue: '继续',
end: '结束',
none_send: '都不发',
success_send: '成功发',
failure_send: '失败发',
all_send: '成功或失败都发',
whether_complement_data: '是否是补数',
schedule_date: '调度日期',
mode_of_execution: '执行方式',
serial_execution: '串行执行',
parallel_execution: '并行执行',
parallelism: '并行度',
custom_parallelism: '自定义并行度',
please_enter_parallelism: '请输入并行度',
please_choose: '请选择',
start_time: '开始时间',
end_time: '结束时间',
crontab: 'Crontab',
delete_confirm: '确定删除吗?',
enter_name_tips: '请输入名称',
switch_version: '切换到该版本',
confirm_switch_version: '确定切换到该版本吗?',
current_version: '当前版本',
run_type: '运行类型',
scheduling_time: '调度时间',
duration: '运行时长',
run_times: '运行次数',
fault_tolerant_sign: '容错标识',
dry_run_flag: '空跑标识',
executor: '执行用户',
host: 'Host',
start_process: '启动工作流',
execute_from_the_current_node: '从当前节点开始执行',
recover_tolerance_fault_process: '恢复被容错的工作流',
resume_the_suspension_process: '恢复运行流程',
execute_from_the_failed_nodes: '从失败节点开始执行',
scheduling_execution: '调度执行',
rerun: '重跑',
stop: '停止',
pause: '暂停',
recovery_waiting_thread: '恢复等待线程',
recover_serial_wait: '串行恢复',
recovery_suspend: '恢复运行',
recovery_failed: '恢复失败',
gantt: '甘特图',
name: '名称',
all_status: '全部状态',
submit_success: '提交成功',
running: '正在运行',
ready_to_pause: '准备暂停',
ready_to_stop: '准备停止',
failed: '失败',
need_fault_tolerance: '需要容错',
kill: 'Kill',
waiting_for_thread: '等待线程',
waiting_for_dependence: '等待依赖',
waiting_for_dependency_to_complete: '等待依赖完成',
delay_execution: '延时执行',
forced_success: '强制成功',
serial_wait: '串行等待',
executing: '正在执行',
startup_type: '启动类型',
complement_range: '补数范围',
parameters_variables: '参数变量',
global_parameters: '全局参数',
local_parameters: '局部参数',
type: '类型',
retry_count: '重试次数',
submit_time: '提交时间',
refresh_status_succeeded: '刷新状态成功',
view_log: '查看日志',
update_log_success: '更新日志成功',
no_more_log: '暂无更多日志',
no_log: '暂无日志',
loading_log: '正在努力请求日志中...',
close: '关闭',
download_log: '下载日志',
refresh_log: '刷新日志',
enter_full_screen: '进入全屏',
cancel_full_screen: '取消全屏',
task_state: '任务状态'
},
task: {
task_name: '任务名称',
task_type: '任务类型',
create_task: '创建任务',
workflow_instance: '工作流实例',
workflow_name: '工作流名称',
workflow_name_tips: '请选择工作流名称',
workflow_state: '工作流状态',
version: '版本',
current_version: '当前版本',
switch_version: '切换到该版本',
confirm_switch_version: '确定切换到该版本吗?',
description: '描述',
move: '移动',
upstream_tasks: '上游任务',
executor: '执行用户',
node_type: '节点类型',
state: '状态',
submit_time: '提交时间',
start_time: '开始时间',
create_time: '创建时间',
update_time: '更新时间',
end_time: '结束时间',
duration: '运行时间',
retry_count: '重试次数',
dry_run_flag: '空跑标识',
host: '主机',
operation: '操作',
edit: '编辑',
delete: '删除',
delete_confirm: '确定删除吗?',
submitted_success: '提交成功',
running_execution: '正在运行',
ready_pause: '准备暂停',
pause: '暂停',
ready_stop: '准备停止',
stop: '停止',
failure: '失败',
success: '成功',
need_fault_tolerance: '需要容错',
kill: 'KILL',
waiting_thread: '等待线程',
waiting_depend: '等待依赖完成',
delay_execution: '延时执行',
forced_success: '强制成功',
view_log: '查看日志',
download_log: '下载日志'
},
dag: {
create: '创建工作流',
search: '搜索',
download_png: '下载工作流图片',
fullscreen_open: '全屏',
fullscreen_close: '退出全屏',
save: '保存',
close: '关闭',
format: '格式化',
refresh_dag_status: '刷新DAG状态',
layout_type: '布局类型',
grid_layout: '网格布局',
dagre_layout: '层次布局',
rows: '行数',
cols: '列数',
copy_success: '复制成功',
workflow_name: '工作流名称',
description: '描述',
tenant: '租户',
timeout_alert: '超时告警',
global_variables: '全局变量',
basic_info: '基本信息',
minute: '分',
key: '键',
value: '值',
success: '成功',
delete_cell: '删除选中的线或节点',
online_directly: '是否上线流程定义',
dag_name_empty: 'DAG图名称不能为空',
positive_integer: '请输入大于 0 的正整数',
prop_empty: '自定义参数prop不能为空',
prop_repeat: 'prop中有重复',
node_not_created: '未创建节点保存失败',
copy_name: '复制名称',
view_variables: '查看变量',
startup_parameter: '启动参数'
},
node: {
current_node_settings: '当前节点设置',
instructions: '使用说明',
view_history: '查看历史',
view_log: '查看日志',
enter_this_child_node: '进入该子节点',
name: '节点名称',
name_tips: '请输入名称(必填)',
task_type: '任务类型',
task_type_tips: '请选择任务类型(必选)',
process_name: '工作流名称',
process_name_tips: '请选择工作流(必选)',
child_node: '子节点',
enter_child_node: '进入该子节点',
run_flag: '运行标志',
normal: '正常',
prohibition_execution: '禁止执行',
description: '描述',
description_tips: '请输入描述',
task_priority: '任务优先级',
worker_group: 'Worker分组',
worker_group_tips: '该Worker分组已经不存在,请选择正确的Worker分组!',
environment_name: '环境名称',
task_group_name: '任务组名称',
task_group_queue_priority: '组内优先级',
number_of_failed_retries: '失败重试次数',
times: '次',
failed_retry_interval: '失败重试间隔',
minute: '分',
delay_execution_time: '延时执行时间',
state: '状态',
branch_flow: '分支流转',
cancel: '取消',
loading: '正在努力加载中...',
confirm: '确定',
success: '成功',
failed: '失败',
backfill_tips: '新创建子工作流还未执行,不能进入子工作流',
task_instance_tips: '该任务还未执行,不能进入子工作流',
branch_tips: '成功分支流转和失败分支流转不能选择同一个节点',
timeout_alarm: '超时告警',
timeout_strategy: '超时策略',
timeout_strategy_tips: '超时策略必须选一个',
timeout_failure: '超时失败',
timeout_period: '超时时长',
timeout_period_tips: '超时时长必须为正整数',
script: '脚本',
script_tips: '请输入脚本(必填)',
resources: '资源',
resources_tips: '请选择资源',
no_resources_tips: '请删除所有未授权或已删除资源',
useless_resources_tips: '未授权或已删除资源',
custom_parameters: '自定义参数',
copy_failed: '该浏览器不支持自动复制',
prop_tips: 'prop(必填)',
prop_repeat: 'prop中有重复',
value_tips: 'value(选填)',
value_required_tips: 'value(必填)',
pre_tasks: '前置任务',
program_type: '程序类型',
spark_version: 'Spark版本',
main_class: '主函数的Class',
main_class_tips: '请填写主函数的Class',
main_package: '主程序包',
main_package_tips: '请选择主程序包',
deploy_mode: '部署方式',
app_name: '任务名称',
app_name_tips: '请输入任务名称(选填)',
driver_cores: 'Driver核心数',
driver_cores_tips: '请输入Driver核心数',
driver_memory: 'Driver内存数',
driver_memory_tips: '请输入Driver内存数',
executor_number: 'Executor数量',
executor_number_tips: '请输入Executor数量',
executor_memory: 'Executor内存数',
executor_memory_tips: '请输入Executor内存数',
executor_cores: 'Executor核心数',
executor_cores_tips: '请输入Executor核心数',
main_arguments: '主程序参数',
main_arguments_tips: '请输入主程序参数',
option_parameters: '选项参数',
option_parameters_tips: '请输入选项参数',
positive_integer_tips: '应为正整数',
flink_version: 'Flink版本',
job_manager_memory: 'JobManager内存数',
job_manager_memory_tips: '请输入JobManager内存数',
task_manager_memory: 'TaskManager内存数',
task_manager_memory_tips: '请输入TaskManager内存数',
slot_number: 'Slot数量',
slot_number_tips: '请输入Slot数量',
parallelism: '并行度',
custom_parallelism: '自定义并行度',
parallelism_tips: '请输入并行度',
parallelism_number_tips: '并行度必须为正整数',
parallelism_complement_tips:
'如果存在大量任务需要补数时,可以利用自定义并行度将补数的任务线程设置成合理的数值,避免对服务器造成过大的影响',
task_manager_number: 'TaskManager数量',
task_manager_number_tips: '请输入TaskManager数量',
http_url: '请求地址',
http_url_tips: '请填写请求地址(必填)',
http_method: '请求类型',
http_parameters: '请求参数',
http_check_condition: '校验条件',
http_condition: '校验内容',
http_condition_tips: '请填写校验内容',
timeout_settings: '超时设置',
connect_timeout: '连接超时',
ms: '毫秒',
socket_timeout: 'Socket超时',
status_code_default: '默认响应码200',
status_code_custom: '自定义响应码',
body_contains: '内容包含',
body_not_contains: '内容不包含',
http_parameters_position: '参数位置',
target_task_name: '目标任务名',
target_task_name_tips: '请输入Pigeon任务名',
datasource_type: '数据源类型',
datasource_instances: '数据源实例',
sql_type: 'SQL类型',
sql_type_query: '查询',
sql_type_non_query: '非查询',
sql_statement: 'SQL语句',
pre_sql_statement: '前置SQL语句',
post_sql_statement: '后置SQL语句',
sql_input_placeholder: '请输入非查询SQL语句',
sql_empty_tips: '语句不能为空',
procedure_method: 'SQL语句',
procedure_method_tips: '请输入存储脚本',
procedure_method_snippet:
'--请输入存储脚本 \n\n--调用存储过程: call <procedure-name>[(<arg1>,<arg2>, ...)] \n\n--调用存储函数:?= call <procedure-name>[(<arg1>,<arg2>, ...)]',
start: '运行',
edit: '编辑',
copy: '复制节点',
delete: '删除',
custom_job: '自定义任务',
custom_script: '自定义脚本',
sqoop_job_name: '任务名称',
sqoop_job_name_tips: '请输入任务名称(必填)',
direct: '流向',
hadoop_custom_params: 'Hadoop参数',
sqoop_advanced_parameters: 'Sqoop参数',
data_source: '数据来源',
type: '类型',
datasource: '数据源',
datasource_tips: '请选择数据源',
model_type: '模式',
form: '表单',
table: '表名',
table_tips: '请输入Mysql表名(必填)',
column_type: '列类型',
all_columns: '全表导入',
some_columns: '选择列',
column: '列',
column_tips: '请输入列名,用 , 隔开',
database: '数据库',
database_tips: '请输入Hive数据库(必填)',
hive_table_tips: '请输入Hive表名(必填)',
hive_partition_keys: 'Hive 分区键',
hive_partition_keys_tips: '请输入分区键',
hive_partition_values: 'Hive 分区值',
hive_partition_values_tips: '请输入分区值',
export_dir: '数据源路径',
export_dir_tips: '请输入数据源路径(必填)',
sql_statement_tips: 'SQL语句(必填)',
map_column_hive: 'Hive类型映射',
map_column_java: 'Java类型映射',
data_target: '数据目的',
create_hive_table: '是否创建新表',
drop_delimiter: '是否删除分隔符',
over_write_src: '是否覆盖数据源',
hive_target_dir: 'Hive目标路径',
hive_target_dir_tips: '请输入Hive临时目录',
replace_delimiter: '替换分隔符',
replace_delimiter_tips: '请输入替换分隔符',
target_dir: '目标路径',
target_dir_tips: '请输入目标路径(必填)',
delete_target_dir: '是否删除目录',
compression_codec: '压缩类型',
file_type: '保存格式',
fields_terminated: '列分隔符',
fields_terminated_tips: '请输入列分隔符',
lines_terminated: '行分隔符',
lines_terminated_tips: '请输入行分隔符',
is_update: '是否更新',
update_key: '更新列',
update_key_tips: '请输入更新列',
update_mode: '更新类型',
only_update: '只更新',
allow_insert: '无更新便插入',
concurrency: '并发度',
concurrency_tips: '请输入并发度',
sea_tunnel_master: 'Master',
sea_tunnel_master_url: 'Master URL',
sea_tunnel_queue: '队列',
sea_tunnel_master_url_tips: '请直接填写地址,例如:127.0.0.1:7077',
switch_condition: '条件',
switch_branch_flow: '分支流转',
and: '且',
or: '或',
datax_custom_template: '自定义模板',
datax_json_template: 'JSON',
datax_target_datasource_type: '目标源类型',
datax_target_database: '目标源实例',
datax_target_table: '目标表',
datax_target_table_tips: '请输入目标表名',
datax_target_database_pre_sql: '目标库前置SQL',
datax_target_database_post_sql: '目标库后置SQL',
datax_non_query_sql_tips: '请输入非查询SQL语句',
datax_job_speed_byte: '限流(字节数)',
datax_job_speed_byte_info: '(KB,0代表不限制)',
datax_job_speed_record: '限流(记录数)',
datax_job_speed_record_info: '(0代表不限制)',
datax_job_runtime_memory: '运行内存',
datax_job_runtime_memory_xms: '最小内存',
datax_job_runtime_memory_xmx: '最大内存',
datax_job_runtime_memory_unit: 'G',
current_hour: '当前小时',
last_1_hour: '前1小时',
last_2_hour: '前2小时',
last_3_hour: '前3小时',
last_24_hour: '前24小时',
today: '今天',
last_1_days: '昨天',
last_2_days: '前两天',
last_3_days: '前三天',
last_7_days: '前七天',
this_week: '本周',
last_week: '上周',
last_monday: '上周一',
last_tuesday: '上周二',
last_wednesday: '上周三',
last_thursday: '上周四',
last_friday: '上周五',
last_saturday: '上周六',
last_sunday: '上周日',
this_month: '本月',
last_month: '上月',
last_month_begin: '上月初',
last_month_end: '上月末',
month: '月',
week: '周',
day: '日',
hour: '时',
add_dependency: '添加依赖',
waiting_dependent_start: '等待依赖启动',
check_interval: '检查间隔',
waiting_dependent_complete: '等待依赖完成',
rule_name: '规则名称',
null_check: '空值检测',
custom_sql: '自定义SQL',
multi_table_accuracy: '多表准确性',
multi_table_value_comparison: '两表值比对',
field_length_check: '字段长度校验',
uniqueness_check: '唯一性校验',
regexp_check: '正则表达式',
timeliness_check: '及时性校验',
enumeration_check: '枚举值校验',
table_count_check: '表行数校验',
src_connector_type: '源数据类型',
src_datasource_id: '源数据源',
src_table: '源数据表',
src_filter: '源表过滤条件',
src_field: '源表检测列',
statistics_name: '实际值名',
check_type: '校验方式',
operator: '校验操作符',
threshold: '阈值',
failure_strategy: '失败策略',
target_connector_type: '目标数据类型',
target_datasource_id: '目标数据源',
target_table: '目标数据表',
target_filter: '目标表过滤条件',
mapping_columns: 'ON语句',
statistics_execute_sql: '实际值计算SQL',
comparison_name: '期望值名',
comparison_execute_sql: '期望值计算SQL',
comparison_type: '期望值类型',
writer_connector_type: '输出数据类型',
writer_datasource_id: '输出数据源',
target_field: '目标表检测列',
field_length: '字段长度限制',
logic_operator: '逻辑操作符',
regexp_pattern: '正则表达式',
deadline: '截止时间',
datetime_format: '时间格式',
enum_list: '枚举值列表',
begin_time: '起始时间',
fix_value: '固定值',
required: '必填'
}
}
const security = {
tenant: {
tenant_manage: '租户管理',
create_tenant: '创建租户',
search_tips: '请输入关键词',
tenant_code: '操作系统租户',
description: '描述',
queue_name: '队列',
create_time: '创建时间',
update_time: '更新时间',
actions: '操作',
edit_tenant: '编辑租户',
tenant_code_tips: '请输入操作系统租户',
queue_name_tips: '请选择队列',
description_tips: '请输入描述',
delete_confirm: '确定删除吗?',
edit: '编辑',
delete: '删除'
},
alarm_group: {
create_alarm_group: '创建告警组',
edit_alarm_group: '编辑告警组',
search_tips: '请输入关键词',
alert_group_name_tips: '请输入告警组名称',
alarm_plugin_instance: '告警组实例',
alarm_plugin_instance_tips: '请选择告警组实例',
alarm_group_description_tips: '请输入告警组描述',
alert_group_name: '告警组名称',
alarm_group_description: '告警组描述',
create_time: '创建时间',
update_time: '更新时间',
operation: '操作',
delete_confirm: '确定删除吗?',
edit: '编辑',
delete: '删除'
},
worker_group: {
create_worker_group: '创建Worker分组',
edit_worker_group: '编辑Worker分组',
search_tips: '请输入关键词',
operation: '操作',
delete_confirm: '确定删除吗?',
edit: '编辑',
delete: '删除',
group_name: '分组名称',
group_name_tips: '请输入分组名称',
worker_addresses: 'Worker地址',
worker_addresses_tips: '请选择Worker地址',
create_time: '创建时间',
update_time: '更新时间'
},
yarn_queue: {
create_queue: '创建队列',
edit_queue: '编辑队列',
search_tips: '请输入关键词',
queue_name: '队列名',
queue_value: '队列值',
create_time: '创建时间',
update_time: '更新时间',
operation: '操作',
edit: '编辑',
queue_name_tips: '请输入队列名',
queue_value_tips: '请输入队列值'
},
environment: {
create_environment: '创建环境',
edit_environment: '编辑环境',
search_tips: '请输入关键词',
edit: '编辑',
delete: '删除',
environment_name: '环境名称',
environment_config: '环境配置',
environment_desc: '环境描述',
worker_groups: 'Worker分组',
create_time: '创建时间',
update_time: '更新时间',
operation: '操作',
delete_confirm: '确定删除吗?',
environment_name_tips: '请输入环境名',
environment_config_tips: '请输入环境配置',
environment_description_tips: '请输入环境描述',
worker_group_tips: '请选择Worker分组'
},
token: {
create_token: '创建令牌',
edit_token: '编辑令牌',
search_tips: '请输入关键词',
user: '用户',
user_tips: '请选择用户',
token: '令牌',
token_tips: '请输入令牌',
expiration_time: '失效时间',
expiration_time_tips: '请选择失效时间',
create_time: '创建时间',
update_time: '更新时间',
operation: '操作',
edit: '编辑',
delete: '删除',
delete_confirm: '确定删除吗?'
},
user: {
user_manage: '用户管理',
create_user: '创建用户',
update_user: '更新用户',
delete_user: '删除用户',
delete_confirm: '确定删除吗?',
delete_confirm_tip: '删除用户属于危险操作,请谨慎操作!',
project: '项目',
resource: '资源',
file_resource: '文件资源',
udf_resource: 'UDF资源',
datasource: '数据源',
udf: 'UDF函数',
authorize_project: '项目授权',
authorize_resource: '资源授权',
authorize_datasource: '数据源授权',
authorize_udf: 'UDF函数授权',
username: '用户名',
username_exists: '用户名已存在',
username_rule_msg: '请输入用户名',
user_password: '密码',
user_password_rule_msg: '请输入包含字母和数字,长度在6~20之间的密码',
user_type: '用户类型',
tenant_code: '租户',
tenant_id_rule_msg: '请选择租户',
queue: '队列',
email: '邮件',
email_rule_msg: '请输入正确的邮箱',
phone: '手机',
phone_rule_msg: '请输入正确的手机号',
state: '状态',
state_enabled: '启用',
state_disabled: '停用',
create_time: '创建时间',
update_time: '更新时间',
operation: '操作',
edit: '编辑',
delete: '删除',
authorize: '授权',
save_error_msg: '保存失败,请重试',
delete_error_msg: '删除失败,请重试',
auth_error_msg: '授权失败,请重试',
auth_success_msg: '授权成功'
},
alarm_instance: {
search_input_tips: '请输入关键字',
alarm_instance_manage: '告警实例管理',
alarm_instance: '告警实例',
alarm_instance_name: '告警实例名称',
alarm_instance_name_tips: '请输入告警实例名称',
alarm_plugin_name: '告警插件名称',
create_time: '创建时间',
update_time: '更新时间',
operation: '操作',
edit: '编辑',
delete: '删除',
confirm: '确定',
cancel: '取消',
submit: '提交',
create: '创建',
select_plugin: '选择插件',
select_plugin_tips: '请选择告警插件',
instance_parameter_exception: '实例参数异常',
WebHook: 'Web钩子',
webHook: 'Web钩子',
IsEnableProxy: '启用代理',
Proxy: '代理',
Port: '端口',
User: '用户',
corpId: '企业ID',
secret: '密钥',
Secret: '密钥',
users: '群员',
userSendMsg: '群员信息',
agentId: '应用ID',
showType: '内容展示类型',
receivers: '收件人',
receiverCcs: '抄送人',
serverHost: 'SMTP服务器',
serverPort: 'SMTP端口',
sender: '发件人',
enableSmtpAuth: '请求认证',
Password: '密码',
starttlsEnable: 'STARTTLS连接',
sslEnable: 'SSL连接',
smtpSslTrust: 'SSL证书信任',
url: 'URL',
requestType: '请求方式',
headerParams: '请求头',
bodyParams: '请求体',
contentField: '内容字段',
Keyword: '关键词',
userParams: '自定义参数',
path: '脚本路径',
type: '类型',
sendType: '发送类型',
username: '用户名',
botToken: '机器人Token',
chatId: '频道ID',
parseMode: '解析类型'
},
k8s_namespace: {
create_namespace: '创建命名空间',
edit_namespace: '编辑命名空间',
search_tips: '请输入关键词',
k8s_namespace: 'K8S命名空间',
k8s_namespace_tips: '请输入k8s命名空间',
k8s_cluster: 'K8S集群',
k8s_cluster_tips: '请输入k8s集群',
owner: '负责人',
owner_tips: '请输入负责人',
tag: '标签',
tag_tips: '请输入标签',
limit_cpu: '最大CPU',
limit_cpu_tips: '请输入最大CPU',
limit_memory: '最大内存',
limit_memory_tips: '请输入最大内存',
create_time: '创建时间',
update_time: '更新时间',
operation: '操作',
edit: '编辑',
delete: '删除',
delete_confirm: '确定删除吗?'
}
}
const datasource = {
datasource: '数据源',
create_datasource: '创建数据源',
search_input_tips: '请输入关键字',
datasource_name: '数据源名称',
datasource_name_tips: '请输入数据源名称',
datasource_user_name: '所属用户',
datasource_type: '数据源类型',
datasource_parameter: '数据源参数',
description: '描述',
description_tips: '请输入描述',
create_time: '创建时间',
update_time: '更新时间',
operation: '操作',
click_to_view: '点击查看',
delete: '删除',
confirm: '确定',
cancel: '取消',
create: '创建',
edit: '编辑',
success: '成功',
test_connect: '测试连接',
ip: 'IP主机名',
ip_tips: '请输入IP主机名',
port: '端口',
port_tips: '请输入端口',
database_name: '数据库名',
database_name_tips: '请输入数据库名',
oracle_connect_type: '服务名或SID',
oracle_connect_type_tips: '请选择服务名或SID',
oracle_service_name: '服务名',
oracle_sid: 'SID',
jdbc_connect_parameters: 'jdbc连接参数',
principal_tips: '请输入Principal',
krb5_conf_tips: '请输入kerberos认证参数 java.security.krb5.conf',
keytab_username_tips: '请输入kerberos认证参数 login.user.keytab.username',
keytab_path_tips: '请输入kerberos认证参数 login.user.keytab.path',
format_tips: '请输入格式为',
connection_parameter: '连接参数',
user_name: '用户名',
user_name_tips: '请输入用户名',
user_password: '密码',
user_password_tips: '请输入密码'
}
const data_quality = {
task_result: {
task_name: '任务名称',
workflow_instance: '工作流实例',
rule_type: '规则类型',
rule_name: '规则名称',
state: '状态',
actual_value: '实际值',
excepted_value: '期望值',
check_type: '检测类型',
operator: '操作符',
threshold: '阈值',
failure_strategy: '失败策略',
excepted_value_type: '期望值类型',
error_output_path: '错误数据路径',
username: '用户名',
create_time: '创建时间',
update_time: '更新时间',
undone: '未完成',
success: '成功',
failure: '失败',
single_table: '单表检测',
single_table_custom_sql: '自定义SQL',
multi_table_accuracy: '多表准确性',
multi_table_comparison: '两表值对比',
expected_and_actual_or_expected: '(期望值-实际值)/实际值 x 100%',
expected_and_actual: '期望值-实际值',
actual_and_expected: '实际值-期望值',
actual_or_expected: '实际值/期望值 x 100%'
},
rule: {
actions: '操作',
name: '规则名称',
type: '规则类型',
username: '用户名',
create_time: '创建时间',
update_time: '更新时间',
input_item: '规则输入项',
view_input_item: '查看规则输入项信息',
input_item_title: '输入项标题',
input_item_placeholder: '输入项占位符',
input_item_type: '输入项类型',
src_connector_type: '源数据类型',
src_datasource_id: '源数据源',
src_table: '源数据表',
src_filter: '源表过滤条件',
src_field: '源表检测列',
statistics_name: '实际值名',
check_type: '校验方式',
operator: '校验操作符',
threshold: '阈值',
failure_strategy: '失败策略',
target_connector_type: '目标数据类型',
target_datasource_id: '目标数据源',
target_table: '目标数据表',
target_filter: '目标表过滤条件',
mapping_columns: 'ON语句',
statistics_execute_sql: '实际值计算SQL',
comparison_name: '期望值名',
comparison_execute_sql: '期望值计算SQL',
comparison_type: '期望值类型',
writer_connector_type: '输出数据类型',
writer_datasource_id: '输出数据源',
target_field: '目标表检测列',
field_length: '字段长度限制',
logic_operator: '逻辑操作符',
regexp_pattern: '正则表达式',
deadline: '截止时间',
datetime_format: '时间格式',
enum_list: '枚举值列表',
begin_time: '起始时间',
fix_value: '固定值',
null_check: '空值检测',
custom_sql: '自定义SQL',
single_table: '单表检测',
multi_table_accuracy: '多表准确性',
multi_table_value_comparison: '两表值比对',
field_length_check: '字段长度校验',
uniqueness_check: '唯一性校验',
regexp_check: '正则表达式',
timeliness_check: '及时性校验',
enumeration_check: '枚举值校验',
table_count_check: '表行数校验',
all: '全部',
FixValue: '固定值',
DailyAvg: '日均值',
WeeklyAvg: '周均值',
MonthlyAvg: '月均值',
Last7DayAvg: '最近7天均值',
Last30DayAvg: '最近30天均值',
SrcTableTotalRows: '源表总行数',
TargetTableTotalRows: '目标表总行数'
}
}
const crontab = {
second: '秒',
minute: '分',
hour: '时',
day: '天',
month: '月',
year: '年',
monday: '星期一',
tuesday: '星期二',
wednesday: '星期三',
thursday: '星期四',
friday: '星期五',
saturday: '星期六',
sunday: '星期天',
every_second: '每一秒钟',
every: '每隔',
second_carried_out: '秒执行 从',
second_start: '秒开始',
specific_second: '具体秒数(可多选)',
specific_second_tip: '请选择具体秒数',
cycle_from: '周期从',
to: '到',
every_minute: '每一分钟',
minute_carried_out: '分执行 从',
minute_start: '分开始',
specific_minute: '具体分钟数(可多选)',
specific_minute_tip: '请选择具体分钟数',
every_hour: '每一小时',
hour_carried_out: '小时执行 从',
hour_start: '小时开始',
specific_hour: '具体小时数(可多选)',
specific_hour_tip: '请选择具体小时数',
every_day: '每一天',
week_carried_out: '周执行 从',
start: '开始',
day_carried_out: '天执行 从',
day_start: '天开始',
specific_week: '具体星期几(可多选)',
specific_week_tip: '请选择具体周几',
specific_day: '具体天数(可多选)',
specific_day_tip: '请选择具体天数',
last_day_of_month: '在这个月的最后一天',
last_work_day_of_month: '在这个月的最后一个工作日',
last_of_month: '在这个月的最后一个',
before_end_of_month: '在本月底前',
recent_business_day_to_month: '最近的工作日(周一至周五)至本月',
in_this_months: '在这个月的第',
every_month: '每一月',
month_carried_out: '月执行 从',
month_start: '月开始',
specific_month: '具体月数(可多选)',
specific_month_tip: '请选择具体月数',
every_year: '每一年',
year_carried_out: '年执行 从',
year_start: '年开始',
specific_year: '具体年数(可多选)',
specific_year_tip: '请选择具体年数',
one_hour: '小时',
one_day: '日'
}
export default {
login,
modal,
theme,
userDropdown,
menu,
home,
password,
profile,
monitor,
resource,
project,
security,
datasource,
data_quality,
crontab
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,497 | [Feature][UI Next][V1.0.0-Alpha] Dependent tasks can re-run automatically in the case of complement | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add a select button to control the switch of complement dependent process.
Api:
Add param ```complementDependentMode``` in ```projects/{projectCode}/executors/start-process-instance```
Enum:
1. OFF_MODE (default, not required)
2. ALL_DEPENDENT
### Use case
#8373
### Related issues
#8373
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8497 | https://github.com/apache/dolphinscheduler/pull/8739 | 1d7ee2c5c444b538f3606e0ba4b22d64f0c2686d | aa5392529bb8d2ba7b4b73a9527adf713f8884c8 | "2022-02-23T04:41:14Z" | java | "2022-03-07T10:05:58Z" | dolphinscheduler-ui-next/src/views/projects/workflow/definition/components/start-modal.tsx | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { defineComponent, PropType, toRefs, h, onMounted, ref } from 'vue'
import { useI18n } from 'vue-i18n'
import Modal from '@/components/modal'
import { useForm } from './use-form'
import { useModal } from './use-modal'
import {
NForm,
NFormItem,
NButton,
NIcon,
NInput,
NSpace,
NRadio,
NRadioGroup,
NSelect,
NSwitch,
NCheckbox,
NDatePicker
} from 'naive-ui'
import {
ArrowDownOutlined,
ArrowUpOutlined,
DeleteOutlined,
PlusCircleOutlined
} from '@vicons/antd'
import { IDefinitionData } from '../types'
import styles from '../index.module.scss'
const props = {
row: {
type: Object as PropType<IDefinitionData>,
default: {}
},
show: {
type: Boolean as PropType<boolean>,
default: false
}
}
export default defineComponent({
name: 'workflowDefinitionStart',
props,
emits: ['update:show', 'update:row', 'updateList'],
setup(props, ctx) {
const parallelismRef = ref(false)
const { t } = useI18n()
const { startState } = useForm()
const {
variables,
handleStartDefinition,
getWorkerGroups,
getAlertGroups,
getEnvironmentList
} = useModal(startState, ctx)
const hideModal = () => {
ctx.emit('update:show')
}
const handleStart = () => {
handleStartDefinition(props.row.code)
}
const generalWarningTypeListOptions = () => [
{
value: 'NONE',
label: t('project.workflow.none_send')
},
{
value: 'SUCCESS',
label: t('project.workflow.success_send')
},
{
value: 'FAILURE',
label: t('project.workflow.failure_send')
},
{
value: 'ALL',
label: t('project.workflow.all_send')
}
]
const generalPriorityList = () => [
{
value: 'HIGHEST',
label: 'HIGHEST',
color: '#ff0000',
icon: ArrowUpOutlined
},
{
value: 'HIGH',
label: 'HIGH',
color: '#ff0000',
icon: ArrowUpOutlined
},
{
value: 'MEDIUM',
label: 'MEDIUM',
color: '#EA7D24',
icon: ArrowUpOutlined
},
{
value: 'LOW',
label: 'LOW',
color: '#2A8734',
icon: ArrowDownOutlined
},
{
value: 'LOWEST',
label: 'LOWEST',
color: '#2A8734',
icon: ArrowDownOutlined
}
]
const renderLabel = (option: any) => {
return [
h(
NIcon,
{
style: {
verticalAlign: 'middle',
marginRight: '4px',
marginBottom: '3px'
},
color: option.color
},
{
default: () => h(option.icon)
}
),
option.label
]
}
const updateWorkerGroup = () => {
startState.startForm.environmentCode = null
}
const addStartParams = () => {
variables.startParamsList.push({
prop: '',
value: ''
})
}
const updateParamsList = (index: number, param: Array<string>) => {
variables.startParamsList[index].prop = param[0]
variables.startParamsList[index].value = param[1]
}
const removeStartParams = (index: number) => {
variables.startParamsList.splice(index, 1)
}
onMounted(() => {
getWorkerGroups()
getAlertGroups()
getEnvironmentList()
})
return {
t,
parallelismRef,
hideModal,
handleStart,
generalWarningTypeListOptions,
generalPriorityList,
renderLabel,
updateWorkerGroup,
removeStartParams,
addStartParams,
updateParamsList,
...toRefs(variables),
...toRefs(startState),
...toRefs(props)
}
},
render() {
const { t } = this
return (
<Modal
show={this.show}
title={t('project.workflow.set_parameters_before_starting')}
onCancel={this.hideModal}
onConfirm={this.handleStart}
>
<NForm ref='startFormRef' label-placement='left' label-width='160'>
<NFormItem
label={t('project.workflow.workflow_name')}
path='workflow_name'
>
{this.row.name}
</NFormItem>
<NFormItem
label={t('project.workflow.failure_strategy')}
path='failureStrategy'
>
<NRadioGroup v-model:value={this.startForm.failureStrategy}>
<NSpace>
<NRadio value='CONTINUE'>
{t('project.workflow.continue')}
</NRadio>
<NRadio value='END'>{t('project.workflow.end')}</NRadio>
</NSpace>
</NRadioGroup>
</NFormItem>
<NFormItem
label={t('project.workflow.notification_strategy')}
path='warningType'
>
<NSelect
options={this.generalWarningTypeListOptions()}
v-model:value={this.startForm.warningType}
/>
</NFormItem>
<NFormItem
label={t('project.workflow.workflow_priority')}
path='processInstancePriority'
>
<NSelect
options={this.generalPriorityList()}
renderLabel={this.renderLabel}
v-model:value={this.startForm.processInstancePriority}
/>
</NFormItem>
<NFormItem
label={t('project.workflow.worker_group')}
path='workerGroup'
>
<NSelect
options={this.workerGroups}
onUpdateValue={this.updateWorkerGroup}
v-model:value={this.startForm.workerGroup}
/>
</NFormItem>
<NFormItem
label={t('project.workflow.environment_name')}
path='environmentCode'
>
<NSelect
options={this.environmentList.filter((item: any) =>
item.workerGroups?.includes(this.startForm.workerGroup)
)}
v-model:value={this.startForm.environmentCode}
clearable
/>
</NFormItem>
<NFormItem
label={t('project.workflow.alarm_group')}
path='warningGroupId'
>
<NSelect
options={this.alertGroups}
placeholder={t('project.workflow.please_choose')}
v-model:value={this.startForm.warningGroupId}
clearable
/>
</NFormItem>
<NFormItem
label={t('project.workflow.complement_data')}
path='complement_data'
>
<NCheckbox
checkedValue={'COMPLEMENT_DATA'}
uncheckedValue={'START_PROCESS'}
v-model:checked={this.startForm.execType}
>
{t('project.workflow.whether_complement_data')}
</NCheckbox>
</NFormItem>
{this.startForm.execType &&
this.startForm.execType !== 'START_PROCESS' && (
<NSpace>
<NFormItem
label={t('project.workflow.mode_of_execution')}
path='runMode'
>
<NRadioGroup v-model:value={this.startForm.runMode}>
<NSpace>
<NRadio value={'RUN_MODE_SERIAL'}>
{t('project.workflow.serial_execution')}
</NRadio>
<NRadio value={'RUN_MODE_PARALLEL'}>
{t('project.workflow.parallel_execution')}
</NRadio>
</NSpace>
</NRadioGroup>
</NFormItem>
{this.startForm.runMode === 'RUN_MODE_PARALLEL' && (
<NFormItem
label={t('project.workflow.parallelism')}
path='expectedParallelismNumber'
>
<NCheckbox v-model:checked={this.parallelismRef}>
{t('project.workflow.custom_parallelism')}
</NCheckbox>
<NInput
disabled={!this.parallelismRef}
placeholder={t(
'project.workflow.please_enter_parallelism'
)}
v-model:value={this.startForm.expectedParallelismNumber}
/>
</NFormItem>
)}
<NFormItem
label={t('project.workflow.schedule_date')}
path='startEndTime'
>
<NDatePicker
type='datetimerange'
clearable
v-model:value={this.startForm.startEndTime}
/>
</NFormItem>
</NSpace>
)}
<NFormItem
label={t('project.workflow.startup_parameter')}
path='startup_parameter'
>
{this.startParamsList.length === 0 ? (
<NButton text type='primary' onClick={this.addStartParams}>
<NIcon>
<PlusCircleOutlined />
</NIcon>
</NButton>
) : (
<NSpace vertical>
{this.startParamsList.map((item, index) => (
<NSpace class={styles.startup} key={index}>
<NInput
pair
separator=':'
placeholder={['prop', 'value']}
onUpdateValue={(param) =>
this.updateParamsList(index, param)
}
/>
<NButton
text
type='error'
onClick={() => this.removeStartParams(index)}
>
<NIcon>
<DeleteOutlined />
</NIcon>
</NButton>
<NButton text type='primary' onClick={this.addStartParams}>
<NIcon>
<PlusCircleOutlined />
</NIcon>
</NButton>
</NSpace>
))}
</NSpace>
)}
</NFormItem>
<NFormItem
label={t('project.workflow.whether_dry_run')}
path='dryRun'
>
<NSwitch
checkedValue={1}
uncheckedValue={0}
v-model:value={this.startForm.dryRun}
/>
</NFormItem>
</NForm>
</Modal>
)
}
})
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,497 | [Feature][UI Next][V1.0.0-Alpha] Dependent tasks can re-run automatically in the case of complement | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add a select button to control the switch of complement dependent process.
Api:
Add param ```complementDependentMode``` in ```projects/{projectCode}/executors/start-process-instance```
Enum:
1. OFF_MODE (default, not required)
2. ALL_DEPENDENT
### Use case
#8373
### Related issues
#8373
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8497 | https://github.com/apache/dolphinscheduler/pull/8739 | 1d7ee2c5c444b538f3606e0ba4b22d64f0c2686d | aa5392529bb8d2ba7b4b73a9527adf713f8884c8 | "2022-02-23T04:41:14Z" | java | "2022-03-07T10:05:58Z" | dolphinscheduler-ui-next/src/views/projects/workflow/definition/components/use-form.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { reactive, ref } from 'vue'
import { useI18n } from 'vue-i18n'
import type { FormRules } from 'naive-ui'
export const useForm = () => {
const { t } = useI18n()
const date = new Date()
const year = date.getFullYear()
const month = date.getMonth()
const day = date.getDate()
const importState = reactive({
importFormRef: ref(),
importForm: {
name: '',
file: ''
},
importRules: {
file: {
required: true,
trigger: ['input', 'blur'],
validator() {
if (importState.importForm.name === '') {
return new Error(t('project.workflow.enter_name_tips'))
}
}
}
} as FormRules
})
const startState = reactive({
startFormRef: ref(),
startForm: {
processDefinitionCode: -1,
startEndTime: [new Date(year, month, day), new Date(year, month, day)],
scheduleTime: null,
failureStrategy: 'CONTINUE',
warningType: 'NONE',
warningGroupId: null,
execType: 'START_PROCESS',
startNodeList: '',
taskDependType: 'TASK_POST',
runMode: 'RUN_MODE_SERIAL',
processInstancePriority: 'MEDIUM',
workerGroup: 'default',
environmentCode: null,
startParams: null,
expectedParallelismNumber: '',
dryRun: 0
}
})
const timingState = reactive({
timingFormRef: ref(),
timingForm: {
startEndTime: [
new Date(year, month, day),
new Date(year + 100, month, day)
],
crontab: '0 0 * * * ? *',
timezoneId: Intl.DateTimeFormat().resolvedOptions().timeZone,
failureStrategy: 'CONTINUE',
warningType: 'NONE',
processInstancePriority: 'MEDIUM',
warningGroupId: '',
workerGroup: 'default',
environmentCode: null
}
})
return {
importState,
startState,
timingState
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,715 | [Bug][UI Next][V1.0.0-Alpha] Some data in the form of a task was gone when continuously clicking the same task node. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
When I want to edit the form of a task by double-clicking a task node in the dag, I found if I continuously double-click the same node, some data in the form was gone.
![image](https://user-images.githubusercontent.com/4928204/156917843-04128b1f-055e-4f64-b805-506b3ea15b2e.png)
![image](https://user-images.githubusercontent.com/4928204/156917857-30cdc4d2-9ef4-45a4-9ca2-59ecb0602e79.png)
### What you expected to happen
I expect that the form component can display the data of the task.
### How to reproduce
1.Enter a process dag.
2. Double-click a task node.
3. Cancel the form.
4. Double-click the same task node and then you will see that.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8715 | https://github.com/apache/dolphinscheduler/pull/8742 | aa5392529bb8d2ba7b4b73a9527adf713f8884c8 | 6e2c2adcaa3320a009afa311a5efce48787a249c | "2022-03-06T09:46:28Z" | java | "2022-03-07T11:26:50Z" | dolphinscheduler-ui-next/src/views/projects/task/components/node/detail-modal.tsx | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {
defineComponent,
PropType,
ref,
reactive,
watch,
nextTick,
provide,
computed
} from 'vue'
import { useI18n } from 'vue-i18n'
import { omit } from 'lodash'
import Modal from '@/components/modal'
import Detail from './detail'
import { formatModel } from './format-data'
import type { ITaskData, ITaskType } from './types'
const props = {
show: {
type: Boolean as PropType<boolean>,
default: false
},
data: {
type: Object as PropType<ITaskData>,
default: { code: 0, taskType: 'SHELL', name: '' }
},
projectCode: {
type: Number as PropType<number>,
required: true
},
readonly: {
type: Boolean as PropType<boolean>,
default: false
},
from: {
type: Number as PropType<number>,
default: 0
}
}
const NodeDetailModal = defineComponent({
name: 'NodeDetailModal',
props,
emits: ['cancel', 'submit'],
setup(props, { emit }) {
const { t } = useI18n()
const detailRef = ref()
const state = reactive({
saving: false,
linkEventShowRef: ref(),
linkEventTextRef: ref(),
linkUrlRef: ref()
})
const onConfirm = async () => {
await detailRef.value.value.validate()
emit('submit', { data: detailRef.value.value.getValues() })
}
const onCancel = () => {
emit('cancel')
}
const onJumpLink = () => {
// TODO: onJumpLink
}
const getLinkEventText = (status: boolean, text: string, url: 'string') => {
state.linkEventShowRef = status
state.linkEventTextRef = text
state.linkUrlRef = url
}
const onTaskTypeChange = (taskType: ITaskType) => {
props.data.taskType = taskType
}
provide(
'data',
computed(() => ({
projectCode: props.projectCode,
data: props.data,
from: props.from,
readonly: props.readonly
}))
)
watch(
() => props.data,
async () => {
if (!props.show) return
await nextTick()
detailRef.value.value.setValues(formatModel(props.data))
}
)
return () => (
<Modal
show={props.show}
title={`${t('project.node.current_node_settings')}`}
onConfirm={onConfirm}
confirmLoading={false}
confirmDisabled={props.readonly}
onCancel={onCancel}
linkEventShow={state.linkEventShowRef}
linkEventText={state.linkEventTextRef}
onJumpLink={onJumpLink}
>
<Detail
ref={detailRef}
onTaskTypeChange={onTaskTypeChange}
key={props.data.taskType}
/>
</Modal>
)
}
})
export default NodeDetailModal
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,580 | [Bug][UI Next][V1.0.0-Alpha] Errors that appear in standalone after packaging and compiling. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
I'm trying to package ```UI-Next``` into the docker image of ```dev``` branch. I try to replace the ```dist``` folder compiled by ```UI-Next``` with the previous version to the ```ui``` folder under the API module. I start the API module to access the domain/dolphinscheduler and get the following errors.
![](https://vip1.loli.io/2022/02/28/j2rmUtpwfk1sVcE.png)
And then I try to remove the absolute path ```/``` in index.html. I got another errors.
![](https://vip2.loli.io/2022/02/28/RlK7sXqxMrZQa48.png)
Is it that the compatibility of API modules has not been completed?
### What you expected to happen
above.
### How to reproduce
above.
### Anything else
In standalone mode.
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8580 | https://github.com/apache/dolphinscheduler/pull/8748 | 220ec8f5fb93181c6b0e264b2a8d9f4bcc9785e3 | fc334b7b139960b061e6bb952029627d26f60005 | "2022-02-28T09:51:45Z" | java | "2022-03-08T03:09:08Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/AppConfiguration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.configuration;
import java.util.Locale;
import org.apache.dolphinscheduler.api.interceptor.LocaleChangeInterceptor;
import org.apache.dolphinscheduler.api.interceptor.LoginHandlerInterceptor;
import org.apache.dolphinscheduler.api.interceptor.RateLimitInterceptor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.cors.CorsConfiguration;
import org.springframework.web.cors.UrlBasedCorsConfigurationSource;
import org.springframework.web.filter.CorsFilter;
import org.springframework.web.servlet.LocaleResolver;
import org.springframework.web.servlet.config.annotation.ContentNegotiationConfigurer;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.ViewControllerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
import org.springframework.web.servlet.i18n.CookieLocaleResolver;
/**
* application configuration
*/
@Configuration
public class AppConfiguration implements WebMvcConfigurer {
public static final String LOGIN_INTERCEPTOR_PATH_PATTERN = "/**/*";
public static final String LOGIN_PATH_PATTERN = "/login";
public static final String REGISTER_PATH_PATTERN = "/users/register";
public static final String PATH_PATTERN = "/**";
public static final String LOCALE_LANGUAGE_COOKIE = "language";
@Autowired
private TrafficConfiguration trafficConfiguration;
@Bean
public CorsFilter corsFilter() {
CorsConfiguration config = new CorsConfiguration();
config.addAllowedOrigin("*");
config.addAllowedMethod("*");
config.addAllowedHeader("*");
UrlBasedCorsConfigurationSource configSource = new UrlBasedCorsConfigurationSource();
configSource.registerCorsConfiguration(PATH_PATTERN, config);
return new CorsFilter(configSource);
}
@Bean
public LoginHandlerInterceptor loginInterceptor() {
return new LoginHandlerInterceptor();
}
/**
* Cookie
* @return local resolver
*/
@Bean(name = "localeResolver")
public LocaleResolver localeResolver() {
CookieLocaleResolver localeResolver = new CookieLocaleResolver();
localeResolver.setCookieName(LOCALE_LANGUAGE_COOKIE);
// set default locale
localeResolver.setDefaultLocale(Locale.US);
// set language tag compliant
localeResolver.setLanguageTagCompliant(false);
return localeResolver;
}
@Bean
public LocaleChangeInterceptor localeChangeInterceptor() {
return new LocaleChangeInterceptor();
}
@Bean
public RateLimitInterceptor createRateLimitInterceptor() {
return new RateLimitInterceptor(trafficConfiguration);
}
@Override
public void addInterceptors(InterceptorRegistry registry) {
// i18n
registry.addInterceptor(localeChangeInterceptor());
if (trafficConfiguration.isTrafficGlobalControlSwitch() || trafficConfiguration.isTrafficTenantControlSwitch()) {
registry.addInterceptor(createRateLimitInterceptor());
}
registry.addInterceptor(loginInterceptor())
.addPathPatterns(LOGIN_INTERCEPTOR_PATH_PATTERN)
.excludePathPatterns(LOGIN_PATH_PATTERN, REGISTER_PATH_PATTERN,
"/swagger-resources/**", "/webjars/**", "/v2/**",
"/doc.html", "/swagger-ui.html", "*.html", "/ui/**", "/error");
}
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
registry.addResourceHandler("/static/**").addResourceLocations("classpath:/static/");
registry.addResourceHandler("doc.html").addResourceLocations("classpath:/META-INF/resources/");
registry.addResourceHandler("swagger-ui.html").addResourceLocations("classpath:/META-INF/resources/");
registry.addResourceHandler("/webjars/**").addResourceLocations("classpath:/META-INF/resources/webjars/");
registry.addResourceHandler("/ui/**").addResourceLocations("file:ui/");
}
@Override
public void addViewControllers(ViewControllerRegistry registry) {
registry.addViewController("/ui/").setViewName("forward:/ui/index.html");
registry.addViewController("/").setViewName("forward:/ui/index.html");
}
/**
* Turn off suffix-based content negotiation
*
* @param configurer configurer
*/
@Override
public void configureContentNegotiation(final ContentNegotiationConfigurer configurer) {
configurer.favorPathExtension(false);
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,741 | [Bug][UI Next][V1.0.0-Alpha] Startup Parameter not init | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
Startup Parameter not init
![image](https://user-images.githubusercontent.com/8847400/157009054-1e04eec6-9d26-4a19-9d28-0e92f9c5cf15.png)
### What you expected to happen
Startup Parameter init
### How to reproduce
Set global param for workflow and open start modal
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8741 | https://github.com/apache/dolphinscheduler/pull/8749 | fc334b7b139960b061e6bb952029627d26f60005 | 799e2fd215799be050e3a4c9e58e699e93fbd8f7 | "2022-03-07T09:59:03Z" | java | "2022-03-08T04:02:04Z" | dolphinscheduler-ui-next/src/views/projects/workflow/definition/components/start-modal.tsx | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { defineComponent, PropType, toRefs, h, onMounted, ref } from 'vue'
import { useI18n } from 'vue-i18n'
import Modal from '@/components/modal'
import { useForm } from './use-form'
import { useModal } from './use-modal'
import {
NForm,
NFormItem,
NButton,
NIcon,
NInput,
NSpace,
NRadio,
NRadioGroup,
NSelect,
NSwitch,
NCheckbox,
NDatePicker
} from 'naive-ui'
import {
ArrowDownOutlined,
ArrowUpOutlined,
DeleteOutlined,
PlusCircleOutlined
} from '@vicons/antd'
import { IDefinitionData } from '../types'
import styles from '../index.module.scss'
const props = {
row: {
type: Object as PropType<IDefinitionData>,
default: {}
},
show: {
type: Boolean as PropType<boolean>,
default: false
}
}
export default defineComponent({
name: 'workflowDefinitionStart',
props,
emits: ['update:show', 'update:row', 'updateList'],
setup(props, ctx) {
const parallelismRef = ref(false)
const { t } = useI18n()
const { startState } = useForm()
const {
variables,
handleStartDefinition,
getWorkerGroups,
getAlertGroups,
getEnvironmentList
} = useModal(startState, ctx)
const hideModal = () => {
ctx.emit('update:show')
}
const handleStart = () => {
handleStartDefinition(props.row.code)
}
const generalWarningTypeListOptions = () => [
{
value: 'NONE',
label: t('project.workflow.none_send')
},
{
value: 'SUCCESS',
label: t('project.workflow.success_send')
},
{
value: 'FAILURE',
label: t('project.workflow.failure_send')
},
{
value: 'ALL',
label: t('project.workflow.all_send')
}
]
const generalPriorityList = () => [
{
value: 'HIGHEST',
label: 'HIGHEST',
color: '#ff0000',
icon: ArrowUpOutlined
},
{
value: 'HIGH',
label: 'HIGH',
color: '#ff0000',
icon: ArrowUpOutlined
},
{
value: 'MEDIUM',
label: 'MEDIUM',
color: '#EA7D24',
icon: ArrowUpOutlined
},
{
value: 'LOW',
label: 'LOW',
color: '#2A8734',
icon: ArrowDownOutlined
},
{
value: 'LOWEST',
label: 'LOWEST',
color: '#2A8734',
icon: ArrowDownOutlined
}
]
const renderLabel = (option: any) => {
return [
h(
NIcon,
{
style: {
verticalAlign: 'middle',
marginRight: '4px',
marginBottom: '3px'
},
color: option.color
},
{
default: () => h(option.icon)
}
),
option.label
]
}
const updateWorkerGroup = () => {
startState.startForm.environmentCode = null
}
const addStartParams = () => {
variables.startParamsList.push({
prop: '',
value: ''
})
}
const updateParamsList = (index: number, param: Array<string>) => {
variables.startParamsList[index].prop = param[0]
variables.startParamsList[index].value = param[1]
}
const removeStartParams = (index: number) => {
variables.startParamsList.splice(index, 1)
}
onMounted(() => {
getWorkerGroups()
getAlertGroups()
getEnvironmentList()
})
return {
t,
parallelismRef,
hideModal,
handleStart,
generalWarningTypeListOptions,
generalPriorityList,
renderLabel,
updateWorkerGroup,
removeStartParams,
addStartParams,
updateParamsList,
...toRefs(variables),
...toRefs(startState),
...toRefs(props)
}
},
render() {
const { t } = this
return (
<Modal
show={this.show}
title={t('project.workflow.set_parameters_before_starting')}
onCancel={this.hideModal}
onConfirm={this.handleStart}
>
<NForm ref='startFormRef' label-placement='left' label-width='160'>
<NFormItem
label={t('project.workflow.workflow_name')}
path='workflow_name'
>
{this.row.name}
</NFormItem>
<NFormItem
label={t('project.workflow.failure_strategy')}
path='failureStrategy'
>
<NRadioGroup v-model:value={this.startForm.failureStrategy}>
<NSpace>
<NRadio value='CONTINUE'>
{t('project.workflow.continue')}
</NRadio>
<NRadio value='END'>{t('project.workflow.end')}</NRadio>
</NSpace>
</NRadioGroup>
</NFormItem>
<NFormItem
label={t('project.workflow.notification_strategy')}
path='warningType'
>
<NSelect
options={this.generalWarningTypeListOptions()}
v-model:value={this.startForm.warningType}
/>
</NFormItem>
<NFormItem
label={t('project.workflow.workflow_priority')}
path='processInstancePriority'
>
<NSelect
options={this.generalPriorityList()}
renderLabel={this.renderLabel}
v-model:value={this.startForm.processInstancePriority}
/>
</NFormItem>
<NFormItem
label={t('project.workflow.worker_group')}
path='workerGroup'
>
<NSelect
options={this.workerGroups}
onUpdateValue={this.updateWorkerGroup}
v-model:value={this.startForm.workerGroup}
/>
</NFormItem>
<NFormItem
label={t('project.workflow.environment_name')}
path='environmentCode'
>
<NSelect
options={this.environmentList.filter((item: any) =>
item.workerGroups?.includes(this.startForm.workerGroup)
)}
v-model:value={this.startForm.environmentCode}
clearable
/>
</NFormItem>
<NFormItem
label={t('project.workflow.alarm_group')}
path='warningGroupId'
>
<NSelect
options={this.alertGroups}
placeholder={t('project.workflow.please_choose')}
v-model:value={this.startForm.warningGroupId}
clearable
/>
</NFormItem>
<NFormItem
label={t('project.workflow.complement_data')}
path='complement_data'
>
<NCheckbox
checkedValue={'COMPLEMENT_DATA'}
uncheckedValue={'START_PROCESS'}
v-model:checked={this.startForm.execType}
>
{t('project.workflow.whether_complement_data')}
</NCheckbox>
</NFormItem>
{this.startForm.execType &&
this.startForm.execType !== 'START_PROCESS' && (
<NSpace>
<NFormItem
label={t('project.workflow.mode_of_dependent')}
path='dependentMode'
>
<NRadioGroup v-model:value={this.startForm.dependentMode}>
<NSpace>
<NRadio value={'OFF_MODE'}>
{t('project.workflow.close')}
</NRadio>
<NRadio value={'ALL_DEPENDENT'}>
{t('project.workflow.open')}
</NRadio>
</NSpace>
</NRadioGroup>
</NFormItem>
<NFormItem
label={t('project.workflow.mode_of_execution')}
path='runMode'
>
<NRadioGroup v-model:value={this.startForm.runMode}>
<NSpace>
<NRadio value={'RUN_MODE_SERIAL'}>
{t('project.workflow.serial_execution')}
</NRadio>
<NRadio value={'RUN_MODE_PARALLEL'}>
{t('project.workflow.parallel_execution')}
</NRadio>
</NSpace>
</NRadioGroup>
</NFormItem>
{this.startForm.runMode === 'RUN_MODE_PARALLEL' && (
<NFormItem
label={t('project.workflow.parallelism')}
path='expectedParallelismNumber'
>
<NCheckbox v-model:checked={this.parallelismRef}>
{t('project.workflow.custom_parallelism')}
</NCheckbox>
<NInput
disabled={!this.parallelismRef}
placeholder={t(
'project.workflow.please_enter_parallelism'
)}
v-model:value={this.startForm.expectedParallelismNumber}
/>
</NFormItem>
)}
<NFormItem
label={t('project.workflow.schedule_date')}
path='startEndTime'
>
<NDatePicker
type='datetimerange'
clearable
v-model:value={this.startForm.startEndTime}
/>
</NFormItem>
</NSpace>
)}
<NFormItem
label={t('project.workflow.startup_parameter')}
path='startup_parameter'
>
{this.startParamsList.length === 0 ? (
<NButton text type='primary' onClick={this.addStartParams}>
<NIcon>
<PlusCircleOutlined />
</NIcon>
</NButton>
) : (
<NSpace vertical>
{this.startParamsList.map((item, index) => (
<NSpace class={styles.startup} key={index}>
<NInput
pair
separator=':'
placeholder={['prop', 'value']}
onUpdateValue={(param) =>
this.updateParamsList(index, param)
}
/>
<NButton
text
type='error'
onClick={() => this.removeStartParams(index)}
>
<NIcon>
<DeleteOutlined />
</NIcon>
</NButton>
<NButton text type='primary' onClick={this.addStartParams}>
<NIcon>
<PlusCircleOutlined />
</NIcon>
</NButton>
</NSpace>
))}
</NSpace>
)}
</NFormItem>
<NFormItem
label={t('project.workflow.whether_dry_run')}
path='dryRun'
>
<NSwitch
checkedValue={1}
uncheckedValue={0}
v-model:value={this.startForm.dryRun}
/>
</NFormItem>
</NForm>
</Modal>
)
}
})
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,741 | [Bug][UI Next][V1.0.0-Alpha] Startup Parameter not init | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
Startup Parameter not init
![image](https://user-images.githubusercontent.com/8847400/157009054-1e04eec6-9d26-4a19-9d28-0e92f9c5cf15.png)
### What you expected to happen
Startup Parameter init
### How to reproduce
Set global param for workflow and open start modal
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8741 | https://github.com/apache/dolphinscheduler/pull/8749 | fc334b7b139960b061e6bb952029627d26f60005 | 799e2fd215799be050e3a4c9e58e699e93fbd8f7 | "2022-03-07T09:59:03Z" | java | "2022-03-08T04:02:04Z" | dolphinscheduler-ui-next/src/views/projects/workflow/definition/components/use-modal.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import _ from 'lodash'
import { reactive, SetupContext } from 'vue'
import { useI18n } from 'vue-i18n'
import { useRouter } from 'vue-router'
import type { Router } from 'vue-router'
import { format } from 'date-fns'
import { importProcessDefinition } from '@/service/modules/process-definition'
import { queryAllWorkerGroups } from '@/service/modules/worker-groups'
import { queryAllEnvironmentList } from '@/service/modules/environment'
import { listAlertGroupById } from '@/service/modules/alert-group'
import { startProcessInstance } from '@/service/modules/executors'
import {
createSchedule,
updateSchedule,
previewSchedule
} from '@/service/modules/schedules'
import { parseTime } from '@/utils/common'
/**
 * Composable backing the workflow-definition modals (import / start / timing).
 *
 * @param state reactive modal state owned by the caller: form models
 *              (importForm, startForm, timingForm) and the matching
 *              naive-ui form refs (…FormRef) used for validation.
 * @param ctx   setup context used to emit `update:show` / `updateList`.
 * @returns reactive `variables` (dropdown option lists, preview results)
 *          plus the modal action handlers.
 */
export function useModal(
  state: any,
  ctx: SetupContext<('update:show' | 'update:row' | 'updateList')[]>
) {
  const { t } = useI18n()
  const router: Router = useRouter()

  const variables = reactive({
    projectCode: Number(router.currentRoute.value.params.projectCode),
    workerGroups: [],
    alertGroups: [],
    environmentList: [],
    startParamsList: [] as Array<{ prop: string; value: string }>,
    schedulePreviewList: []
  })

  // Reset the import form *model*. The submit handler reads
  // state.importForm.file, so that object (not the form ref, which only
  // exposes validate()) is what must be cleared.
  const resetImportForm = () => {
    state.importForm.name = ''
    state.importForm.file = ''
  }

  const handleImportDefinition = () => {
    state.importFormRef.validate(async (valid: any) => {
      // naive-ui passes the error list to the callback; falsy means valid.
      if (!valid) {
        try {
          const formData = new FormData()
          formData.append('file', state.importForm.file)
          const code = Number(router.currentRoute.value.params.projectCode)
          await importProcessDefinition(formData, code)
          window.$message.success(t('project.workflow.success'))
          ctx.emit('updateList')
          ctx.emit('update:show')
          resetImportForm()
        } catch (error: any) {
          window.$message.error(error.message)
        }
      }
    })
  }

  const handleStartDefinition = (code: number) => {
    state.startFormRef.validate(async (valid: any) => {
      if (!valid) {
        state.startForm.processDefinitionCode = code
        if (state.startForm.startEndTime) {
          // 24-hour 'HH' is required here: date-fns 'hh' is the 12-hour
          // clock and silently produced wrong schedule times for any
          // afternoon timestamp.
          const start = format(
            new Date(state.startForm.startEndTime[0]),
            'yyyy-MM-dd HH:mm:ss'
          )
          const end = format(
            new Date(state.startForm.startEndTime[1]),
            'yyyy-MM-dd HH:mm:ss'
          )
          state.startForm.scheduleTime = `${start},${end}`
        }
        // Only forward startup parameters that were actually filled in.
        const startParams = {} as any
        for (const item of variables.startParamsList) {
          if (item.value !== '') {
            startParams[item.prop] = item.value
          }
        }
        state.startForm.startParams = !_.isEmpty(startParams)
          ? JSON.stringify(startParams)
          : ''

        try {
          await startProcessInstance(state.startForm, variables.projectCode)
          window.$message.success(t('project.workflow.success'))
          ctx.emit('updateList')
          ctx.emit('update:show')
        } catch (error: any) {
          window.$message.error(error.message)
        }
      }
    })
  }

  const handleCreateTiming = (code: number) => {
    state.timingFormRef.validate(async (valid: any) => {
      if (!valid) {
        const data: any = getTimingData()
        data.processDefinitionCode = code

        try {
          await createSchedule(data, variables.projectCode)
          window.$message.success(t('project.workflow.success'))
          ctx.emit('updateList')
          ctx.emit('update:show')
        } catch (error: any) {
          window.$message.error(error.message)
        }
      }
    })
  }

  const handleUpdateTiming = (id: number) => {
    state.timingFormRef.validate(async (valid: any) => {
      if (!valid) {
        const data: any = getTimingData()
        data.id = id

        try {
          await updateSchedule(data, variables.projectCode, id)
          window.$message.success(t('project.workflow.success'))
          ctx.emit('updateList')
          ctx.emit('update:show')
        } catch (error: any) {
          window.$message.error(error.message)
        }
      }
    })
  }

  // Serialise the timing form into the payload shape the schedule API
  // expects (schedule itself is a nested JSON string).
  const getTimingData = () => {
    const start = format(
      parseTime(state.timingForm.startEndTime[0]),
      'yyyy-MM-dd HH:mm:ss'
    )
    const end = format(
      parseTime(state.timingForm.startEndTime[1]),
      'yyyy-MM-dd HH:mm:ss'
    )

    const data = {
      schedule: JSON.stringify({
        startTime: start,
        endTime: end,
        crontab: state.timingForm.crontab,
        timezoneId: state.timingForm.timezoneId
      }),
      failureStrategy: state.timingForm.failureStrategy,
      warningType: state.timingForm.warningType,
      processInstancePriority: state.timingForm.processInstancePriority,
      // The backend expects 0 (no group) rather than an empty string.
      warningGroupId:
        state.timingForm.warningGroupId === ''
          ? 0
          : state.timingForm.warningGroupId,
      workerGroup: state.timingForm.workerGroup,
      environmentCode: state.timingForm.environmentCode
    }
    return data
  }

  const getWorkerGroups = () => {
    queryAllWorkerGroups().then((res: any) => {
      variables.workerGroups = res.map((item: string) => ({
        label: item,
        value: item
      }))
    })
  }

  const getEnvironmentList = () => {
    queryAllEnvironmentList().then((res: any) => {
      variables.environmentList = res.map((item: any) => ({
        label: item.name,
        value: item.code,
        workerGroups: item.workerGroups
      }))
    })
  }

  const getAlertGroups = () => {
    listAlertGroupById().then((res: any) => {
      variables.alertGroups = res.map((item: any) => ({
        label: item.groupName,
        value: item.id
      }))
    })
  }

  const getPreviewSchedule = () => {
    state.timingFormRef.validate(async (valid: any) => {
      if (!valid) {
        const projectCode = Number(router.currentRoute.value.params.projectCode)
        // NOTE(review): this uses new Date(...) while getTimingData uses
        // parseTime(...) for the same fields — presumably equivalent here;
        // confirm and unify.
        const start = format(
          new Date(state.timingForm.startEndTime[0]),
          'yyyy-MM-dd HH:mm:ss'
        )
        const end = format(
          new Date(state.timingForm.startEndTime[1]),
          'yyyy-MM-dd HH:mm:ss'
        )

        const schedule = JSON.stringify({
          startTime: start,
          endTime: end,
          crontab: state.timingForm.crontab
        })

        previewSchedule({ schedule }, projectCode)
          .then((res: any) => {
            variables.schedulePreviewList = res
          })
          .catch((error: any) => {
            window.$message.error(error.message)
          })
      }
    })
  }

  return {
    variables,
    handleImportDefinition,
    handleStartDefinition,
    handleCreateTiming,
    handleUpdateTiming,
    getWorkerGroups,
    getAlertGroups,
    getEnvironmentList,
    getPreviewSchedule
  }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,730 | [Bug][UI Next][V1.0.0-Alpha] The workflow timeout alarm time validate | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
Update workflow definition error when I set the timeout alarm time 0.5 minutes.
![image](https://user-images.githubusercontent.com/8847400/156982280-2028678f-0538-476c-91e1-d24eeb640430.png)
![image](https://user-images.githubusercontent.com/8847400/156982558-2054e692-c022-407f-af7e-79ecebcaf498.png)
### What you expected to happen
Update workflow success
### How to reproduce
Update workflow success and set alarm timeout 0.5 minute.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8730 | https://github.com/apache/dolphinscheduler/pull/8750 | 799e2fd215799be050e3a4c9e58e699e93fbd8f7 | 080a237b0d8f0adefde22c922ad2c52fa26c133e | "2022-03-07T06:55:00Z" | java | "2022-03-08T04:25:31Z" | dolphinscheduler-ui-next/src/utils/regex.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Mailbox: the local part may additionally contain Chinese characters
// (\u4e00-\u9fa5).
const email = /^[A-Za-z0-9\u4e00-\u9fa5\.]+@[a-zA-Z0-9_-]+(\.[a-zA-Z0-9_-]+)+$/
// Mainland-style 11-digit number starting with 1.
const phone = /^1\d{10}$/
// 6-20 alphanumeric characters; must mix letters and digits
// (rejects all-digit and all-letter strings via the two lookaheads).
const password = /^(?![0-9]+$)(?![a-zA-Z]+$)[0-9A-Za-z]{6,20}$/

/** Shared validation patterns used by the form rules. */
const regex = { email, phone, password }

export default regex
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,730 | [Bug][UI Next][V1.0.0-Alpha] The workflow timeout alarm time validate | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
Update workflow definition error when I set the timeout alarm time 0.5 minutes.
![image](https://user-images.githubusercontent.com/8847400/156982280-2028678f-0538-476c-91e1-d24eeb640430.png)
![image](https://user-images.githubusercontent.com/8847400/156982558-2054e692-c022-407f-af7e-79ecebcaf498.png)
### What you expected to happen
Update workflow success
### How to reproduce
Update workflow success and set alarm timeout 0.5 minute.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8730 | https://github.com/apache/dolphinscheduler/pull/8750 | 799e2fd215799be050e3a4c9e58e699e93fbd8f7 | 080a237b0d8f0adefde22c922ad2c52fa26c133e | "2022-03-07T06:55:00Z" | java | "2022-03-08T04:25:31Z" | dolphinscheduler-ui-next/src/views/projects/workflow/components/dag/dag-save-modal.tsx | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { defineComponent, PropType, ref, computed, onMounted, watch } from 'vue'
import Modal from '@/components/modal'
import { useI18n } from 'vue-i18n'
import {
NForm,
NFormItem,
NInput,
NSelect,
NSwitch,
NInputNumber,
NDynamicInput,
NCheckbox
} from 'naive-ui'
import { queryTenantList } from '@/service/modules/tenants'
import { SaveForm, WorkflowDefinition } from './types'
import { useRoute } from 'vue-router'
import { verifyName } from '@/service/modules/process-definition'
import './x6-style.scss'
const props = {
  visible: {
    type: Boolean as PropType<boolean>,
    default: false
  },
  // If this prop is passed, it means from definition detail
  definition: {
    type: Object as PropType<WorkflowDefinition>,
    default: undefined
  }
}

interface Tenant {
  tenantCode: string
  id: number
}

/**
 * Modal shown when saving a workflow DAG: collects the basic info
 * (name, description, tenant, timeout, global params) and emits `save`
 * with the validated form value. When `definition` is passed (edit mode)
 * the form is pre-populated from the existing process definition.
 */
export default defineComponent({
  name: 'dag-save-modal',
  props,
  emits: ['update:show', 'save'],
  setup(props, context) {
    const route = useRoute()
    const { t } = useI18n()

    const projectCode = Number(route.params.projectCode)

    // Tenant options for the select; a synthetic 'default' entry is always
    // appended so the form can fall back to the default tenant.
    const tenants = ref<Tenant[]>([])
    const tenantsDropdown = computed(() => {
      if (tenants.value) {
        return tenants.value
          .map((t) => ({
            label: t.tenantCode,
            value: t.tenantCode
          }))
          .concat({ label: 'default', value: 'default' })
      }
      return []
    })
    onMounted(() => {
      queryTenantList().then((res: any) => {
        tenants.value = res
      })
    })

    const formValue = ref<SaveForm>({
      name: '',
      description: '',
      tenantCode: 'default',
      timeoutFlag: false,
      timeout: 0,
      globalParams: [],
      release: false
    })
    const formRef = ref()
    const rule = {
      name: {
        required: true,
        message: t('project.dag.dag_name_empty')
      },
      timeout: {
        validator() {
          // The timeout is sent to the backend in whole minutes, so when
          // the timeout flag is on it must be a positive *integer*;
          // fractional values such as 0.5 previously slipped past the
          // `<= 0` check and failed on the server side.
          if (
            formValue.value.timeoutFlag &&
            (!Number.isInteger(formValue.value.timeout) ||
              formValue.value.timeout <= 0)
          ) {
            return new Error(t('project.dag.positive_integer'))
          }
        }
      },
      globalParams: {
        validator() {
          // Each global parameter must be non-empty and unique.
          // NOTE(review): this checks `param.value`; given updateModalData
          // maps key=prop / value=value, uniqueness of the *key* looks
          // intended — confirm against the NDynamicInput pair preset.
          const props = new Set()
          for (const param of formValue.value.globalParams) {
            const prop = param.value
            if (!prop) {
              return new Error(t('project.dag.prop_empty'))
            }
            if (props.has(prop)) {
              return new Error(t('project.dag.prop_repeat'))
            }
            props.add(prop)
          }
        }
      }
    }
    const onSubmit = () => {
      formRef.value.validate(async (valid: any) => {
        // naive-ui passes the error list to the callback; falsy means the
        // form validated successfully.
        if (!valid) {
          const params = {
            name: formValue.value.name
          }
          // Only re-check name uniqueness when the name actually changed.
          if (
            props.definition?.processDefinition.name !== formValue.value.name
          ) {
            verifyName(params, projectCode)
              .then(() => context.emit('save', formValue.value))
              .catch((error: any) => {
                window.$message.error(error.message)
              })
          } else {
            context.emit('save', formValue.value)
          }
        }
      })
    }
    const onCancel = () => {
      context.emit('update:show', false)
    }

    // Populate the form from the existing definition (edit mode).
    const updateModalData = () => {
      const process = props.definition?.processDefinition
      if (process) {
        formValue.value.name = process.name
        formValue.value.description = process.description
        formValue.value.tenantCode = process.tenantCode
        if (process.timeout && process.timeout > 0) {
          formValue.value.timeoutFlag = true
          formValue.value.timeout = process.timeout
        }
        formValue.value.globalParams = process.globalParamList.map((param) => ({
          key: param.prop,
          value: param.value
        }))
      }
    }
    onMounted(() => updateModalData())
    watch(
      () => props.definition?.processDefinition,
      () => updateModalData()
    )

    return () => (
      <Modal
        show={props.visible}
        title={t('project.dag.basic_info')}
        onConfirm={onSubmit}
        onCancel={onCancel}
        autoFocus={false}
      >
        <NForm
          label-width='100'
          model={formValue.value}
          rules={rule}
          size='medium'
          label-placement='left'
          ref={formRef}
        >
          <NFormItem label={t('project.dag.workflow_name')} path='name'>
            <NInput v-model:value={formValue.value.name} />
          </NFormItem>
          <NFormItem label={t('project.dag.description')} path='description'>
            <NInput
              type='textarea'
              v-model:value={formValue.value.description}
            />
          </NFormItem>
          <NFormItem label={t('project.dag.tenant')} path='tenantCode'>
            <NSelect
              options={tenantsDropdown.value}
              v-model:value={formValue.value.tenantCode}
            />
          </NFormItem>
          <NFormItem label={t('project.dag.timeout_alert')} path='timeoutFlag'>
            <NSwitch v-model:value={formValue.value.timeoutFlag} />
          </NFormItem>
          {formValue.value.timeoutFlag && (
            <NFormItem label=' ' path='timeout'>
              <NInputNumber
                v-model:value={formValue.value.timeout}
                show-button={false}
                min={0}
                v-slots={{
                  // NOTE(review): hard-coded Chinese '分' (minutes) suffix;
                  // presumably this should go through i18n — confirm.
                  suffix: () => '分'
                }}
              ></NInputNumber>
            </NFormItem>
          )}
          <NFormItem
            label={t('project.dag.global_variables')}
            path='globalParams'
          >
            <NDynamicInput
              v-model:value={formValue.value.globalParams}
              preset='pair'
              key-placeholder={t('project.dag.key')}
              value-placeholder={t('project.dag.value')}
            />
          </NFormItem>
          {props.definition && (
            <NFormItem label=' ' path='timeoutFlag'>
              <NCheckbox v-model:checked={formValue.value.release}>
                {t('project.dag.online_directly')}
              </NCheckbox>
            </NFormItem>
          )}
        </NForm>
      </Modal>
    )
  }
})
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,729 | [Bug][UI-Next][V1.0.0-Alpha] Backend locale should be consistent with ui | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![](https://vip1.loli.io/2022/03/07/5wBrp64WqRLK8d7.png)
The backend uses the cookie name ```language``` to get the locale in dolphinscheduler-ui.
### What you expected to happen
above.
### How to reproduce
above.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8729 | https://github.com/apache/dolphinscheduler/pull/8755 | d3a8fa1d987e32ae5ff1c2755987c6b07254eb88 | cbb756e1cbed47e296ccc1d25c3de9e7bc13009a | "2022-03-07T06:32:15Z" | java | "2022-03-08T09:51:35Z" | dolphinscheduler-ui-next/package.json | {
"name": "dolphinscheduler-ui-next",
"version": "0.0.0",
"scripts": {
"dev": "vite",
"build:prod": "vue-tsc --noEmit && vite build --mode production",
"preview": "vite preview",
"lint": "eslint src --fix --ext .ts,.tsx,.vue",
"prettier": "prettier --write \"src/**/*.{vue,ts,tsx}\""
},
"dependencies": {
"@antv/layout": "^0.1.31",
"@antv/x6": "^1.30.1",
"@vueuse/core": "^7.6.2",
"axios": "^0.24.0",
"date-fns": "^2.28.0",
"echarts": "^5.3.0",
"lodash": "^4.17.21",
"monaco-editor": "^0.31.1",
"naive-ui": "^2.26.0",
"nprogress": "^0.2.0",
"pinia": "^2.0.11",
"pinia-plugin-persistedstate": "^1.2.2",
"qs": "^6.10.3",
"vfonts": "^0.1.0",
"vue": "^3.2.31",
"vue-i18n": "^9.2.0-beta.30",
"vue-router": "^4.0.12"
},
"devDependencies": {
"@types/node": "^16.11.25",
"@types/nprogress": "^0.2.0",
"@types/qs": "^6.9.7",
"@typescript-eslint/eslint-plugin": "^5.12.1",
"@typescript-eslint/parser": "^5.12.1",
"@vicons/antd": "^0.11.0",
"@vitejs/plugin-vue": "^1.10.2",
"@vitejs/plugin-vue-jsx": "^1.3.7",
"dart-sass": "^1.25.0",
"eslint": "^8.9.0",
"eslint-config-prettier": "^8.4.0",
"eslint-plugin-prettier": "^4.0.0",
"eslint-plugin-vue": "^8.5.0",
"prettier": "^2.5.1",
"sass": "^1.49.8",
"sass-loader": "^12.6.0",
"typescript": "^4.5.5",
"typescript-plugin-css-modules": "^3.4.0",
"vite": "^2.8.4",
"vite-plugin-compression": "^0.3.6",
"vue-tsc": "^0.28.10"
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,729 | [Bug][UI-Next][V1.0.0-Alpha] Backend locale should be consistent with ui | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![](https://vip1.loli.io/2022/03/07/5wBrp64WqRLK8d7.png)
The backend uses the cookie name ```language``` to get the locale in dolphinscheduler-ui.
### What you expected to happen
above.
### How to reproduce
above.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8729 | https://github.com/apache/dolphinscheduler/pull/8755 | d3a8fa1d987e32ae5ff1c2755987c6b07254eb88 | cbb756e1cbed47e296ccc1d25c3de9e7bc13009a | "2022-03-07T06:32:15Z" | java | "2022-03-08T09:51:35Z" | dolphinscheduler-ui-next/src/layouts/content/components/locales/use-dropdown.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { DropdownOption } from 'naive-ui'
import { useI18n } from 'vue-i18n'
import { useLocalesStore } from '@/store/locales/locales'
import type { Locales } from '@/store/locales/types'
/**
 * Dropdown handler for the locale switcher.
 *
 * Besides updating vue-i18n and the persisted pinia store, the chosen
 * locale is mirrored into the `language` cookie, which the backend reads
 * to localise its responses (see the "Backend locale should be consistent
 * with ui" issue #8729).
 *
 * @param chooseVal ref holding the currently displayed dropdown label
 */
export function useDropDown(chooseVal: any) {
  const { locale } = useI18n()
  const localesStore = useLocalesStore()

  const handleSelect = (key: string | number, option: DropdownOption) => {
    chooseVal.value = option.label
    locale.value = key as Locales
    localesStore.setLocales(locale.value as Locales)
    // Keep the backend's notion of the locale in sync with the UI.
    document.cookie = `language=${locale.value}; path=/`
  }

  return {
    handleSelect
  }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,729 | [Bug][UI-Next][V1.0.0-Alpha] Backend locale should be consistent with ui | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![](https://vip1.loli.io/2022/03/07/5wBrp64WqRLK8d7.png)
The backend uses the cookie name ```language``` to get the locale in dolphinscheduler-ui.
### What you expected to happen
above.
### How to reproduce
above.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8729 | https://github.com/apache/dolphinscheduler/pull/8755 | d3a8fa1d987e32ae5ff1c2755987c6b07254eb88 | cbb756e1cbed47e296ccc1d25c3de9e7bc13009a | "2022-03-07T06:32:15Z" | java | "2022-03-08T09:51:35Z" | dolphinscheduler-ui-next/src/service/service.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import axios, { AxiosRequestConfig, AxiosResponse, AxiosError } from 'axios'
import { useUserStore } from '@/store/user/user'
import qs from 'qs'
import _ from 'lodash'
import router from '@/router'
const userStore = useUserStore()

// Shared axios configuration. The base URL differs between the dev proxy
// and a production deployment; array query params are serialised in
// `repeat` style (a=1&a=2) to match the backend's parameter binding.
const baseRequestConfig: AxiosRequestConfig = {
  baseURL:
    import.meta.env.MODE === 'development'
      ? '/dolphinscheduler'
      : import.meta.env.VITE_APP_PROD_WEB_URL + '/dolphinscheduler',
  timeout: 10000,
  // Plain objects are form-encoded; anything else (e.g. FormData for
  // file uploads) is passed through untouched.
  transformRequest: (params) => {
    if (_.isPlainObject(params)) {
      return qs.stringify(params, { arrayFormat: 'repeat' })
    } else {
      return params
    }
  },
  paramsSerializer: (params) => {
    return qs.stringify(params, { arrayFormat: 'repeat' })
  }
}

const service = axios.create(baseRequestConfig)
// Shared axios error handler: a 401 (unauthenticated) or 504 response
// drops the cached session and redirects the user back to the login page.
// The original rejection is always propagated to the caller.
const err = (err: AxiosError): Promise<AxiosError> => {
  if (err.response?.status === 401 || err.response?.status === 504) {
    userStore.setSessionId('')
    userStore.setUserInfo({})
    router.push({ path: '/login' })
  }
  return Promise.reject(err)
}
// Attach the cached session id header to every outgoing request.
service.interceptors.request.use((config: AxiosRequestConfig<any>) => {
  config.headers && (config.headers.sessionId = userStore.getSessionId)
  return config
}, err)
// The response to intercept
service.interceptors.response.use((res: AxiosResponse) => {
  // No `code` field means the body is not the standard envelope
  // (e.g. raw downloads) — pass it through unchanged.
  if (res.data.code === undefined) {
    return res.data
  }
  // Backend envelope: code 0 is success and the payload lives in `data`;
  // any other code is surfaced as an error with the request URL appended.
  switch (res.data.code) {
    case 0:
      return res.data.data
    default:
      throw new Error(`${res.data.msg}: ${res.config.url}`)
  }
}, err)
const apiPrefix = '/dolphinscheduler'
const reSlashPrefix = /^\/+/

// Normalise a request URL: absolute http(s) URLs and already-rooted
// paths are returned untouched; bare relative paths get the API prefix
// attached (after stripping any redundant leading slashes).
const resolveURL = (url: string) => {
  if (url.startsWith('http')) {
    return url
  }
  if (url.startsWith('/')) {
    return url
  }
  return `${apiPrefix}/${url.replace(reSlashPrefix, '')}`
}
/**
 * Trigger a browser file download by building a hidden GET form
 * (one hidden input per key of `obj`) and submitting it, then removing
 * the form from the document again.
 */
const downloadFile = (url: string, obj?: any) => {
  const action = resolveURL(url)
  const fields: any = obj || {}

  const form = document.createElement('form')
  form.action = action
  form.method = 'get'
  form.style.display = 'none'

  Object.keys(fields).forEach((key) => {
    const input = document.createElement('input')
    input.type = 'hidden'
    input.name = key
    input.value = fields[key]
    form.appendChild(input)
  })

  const submitter = document.createElement('input')
  submitter.type = 'submit'
  form.appendChild(submitter)

  document.body.appendChild(form)
  form.submit()
  document.body.removeChild(form)
}

export { service as axios, downloadFile }
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,744 | [Bug] [standalone-server] start standalone server failed | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
```permission denied: ./standalone-server/bin/start.sh```
### What you expected to happen
success.
### How to reproduce
```./bin/dolphinscheduler-daemon.sh start standalone-server```
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8744 | https://github.com/apache/dolphinscheduler/pull/8747 | c661827752f7d17f52426b7182bbe43caa15d066 | b024aabd123f94840b51f1da738e017a16312cee | "2022-03-07T11:11:48Z" | java | "2022-03-09T05:26:28Z" | script/dolphinscheduler-daemon.sh | #!/bin/sh
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
usage="Usage: dolphinscheduler-daemon.sh (start|stop|status) <api-server|master-server|worker-server|alert-server|python-gateway-server|standalone-server> "
# if no args specified, show usage
if [ $# -le 1 ]; then
echo $usage
exit 1
fi
startStop=$1
shift
command=$1
shift
echo "Begin $startStop $command......"
BIN_DIR=`dirname $0`
BIN_DIR=`cd "$BIN_DIR"; pwd`
DOLPHINSCHEDULER_HOME=`cd "$BIN_DIR/.."; pwd`
source "${DOLPHINSCHEDULER_HOME}/bin/env/dolphinscheduler_env.sh"
export HOSTNAME=`hostname`
export DOLPHINSCHEDULER_LOG_DIR=$DOLPHINSCHEDULER_HOME/$command/logs
export STOP_TIMEOUT=5
if [ ! -d "$DOLPHINSCHEDULER_LOG_DIR" ]; then
mkdir $DOLPHINSCHEDULER_LOG_DIR
fi
pid=$DOLPHINSCHEDULER_HOME/$command/pid
cd $DOLPHINSCHEDULER_HOME/$command
if [ "$command" = "api-server" ]; then
log=$DOLPHINSCHEDULER_HOME/api-server/logs/$command-$HOSTNAME.out
elif [ "$command" = "master-server" ]; then
log=$DOLPHINSCHEDULER_HOME/master-server/logs/$command-$HOSTNAME.out
elif [ "$command" = "worker-server" ]; then
log=$DOLPHINSCHEDULER_HOME/worker-server/logs/$command-$HOSTNAME.out
elif [ "$command" = "alert-server" ]; then
log=$DOLPHINSCHEDULER_HOME/alert-server/logs/$command-$HOSTNAME.out
elif [ "$command" = "standalone-server" ]; then
log=$DOLPHINSCHEDULER_HOME/standalone-server/logs/$command-$HOSTNAME.out
elif [ "$command" = "python-gateway-server" ]; then
log=$DOLPHINSCHEDULER_HOME/python-gateway-server/logs/$command-$HOSTNAME.out
else
echo "Error: No command named '$command' was found."
exit 1
fi
case $startStop in
(start)
echo starting $command, logging to $DOLPHINSCHEDULER_LOG_DIR
nohup "$DOLPHINSCHEDULER_HOME/$command/bin/start.sh" > $log 2>&1 &
echo $! > $pid
;;
(stop)
if [ -f $pid ]; then
TARGET_PID=`cat $pid`
if kill -0 $TARGET_PID > /dev/null 2>&1; then
echo stopping $command
pkill -P $TARGET_PID
sleep $STOP_TIMEOUT
if kill -0 $TARGET_PID > /dev/null 2>&1; then
echo "$command did not stop gracefully after $STOP_TIMEOUT seconds: killing with kill -9"
pkill -P -9 $TARGET_PID
fi
else
echo no $command to stop
fi
rm -f $pid
else
echo no $command to stop
fi
;;
(status)
# more details about the status can be added later
serverCount=`ps -ef | grep "$DOLPHINSCHEDULER_HOME" | grep "$CLASS" | grep -v "grep" | wc -l`
state="STOP"
# font color - red
state="[ \033[1;31m $state \033[0m ]"
if [[ $serverCount -gt 0 ]];then
state="RUNNING"
# font color - green
state="[ \033[1;32m $state \033[0m ]"
fi
echo -e "$command $state"
;;
(*)
echo $usage
exit 1
;;
esac
echo "End $startStop $command."
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,648 | [Bug][UI Next][V1.0.0-Alpha] create workflow definition error in standalone mode | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![](https://vip1.loli.io/2022/03/02/8sUmlo53pMhLAIX.png)
![](https://vip1.loli.io/2022/03/02/gRPzuWv7V5Mb93O.png)
No requests were sent in standalone mode.
```http://localhost:3000/dolphinscheduler/tenants/list``` request will be sent In dev mode.
### What you expected to happen
successfully.
### How to reproduce
create workflow definition in standalone mode.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8648 | https://github.com/apache/dolphinscheduler/pull/8767 | fcbaaf5de62ad11b0d7fd445741af3dab4227149 | 1d1c99b31f54ac2e5a7761242c9b20565a92a546 | "2022-03-02T03:07:15Z" | java | "2022-03-09T08:31:42Z" | dolphinscheduler-ui-next/index.html | <!--
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
-->
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <link rel="icon" href="/favicon.ico" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Dolphin Scheduler Admin</title>
  </head>
  <body>
    <!-- Vue application mount point; /src/main.ts is the Vite entry module -->
    <div id="app"></div>
    <script type="module" src="/src/main.ts"></script>
  </body>
</html>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,648 | [Bug][UI Next][V1.0.0-Alpha] create workflow definition error in standalone mode | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![](https://vip1.loli.io/2022/03/02/8sUmlo53pMhLAIX.png)
![](https://vip1.loli.io/2022/03/02/gRPzuWv7V5Mb93O.png)
No requests were sent in standalone mode.
```http://localhost:3000/dolphinscheduler/tenants/list``` request will be sent In dev mode.
### What you expected to happen
successfully.
### How to reproduce
create workflow definition in standalone mode.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8648 | https://github.com/apache/dolphinscheduler/pull/8767 | fcbaaf5de62ad11b0d7fd445741af3dab4227149 | 1d1c99b31f54ac2e5a7761242c9b20565a92a546 | "2022-03-02T03:07:15Z" | java | "2022-03-09T08:31:42Z" | dolphinscheduler-ui-next/public/lodash.min.js | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,772 | [block-Bug][UI Next][V1.0.0-Alpha]Account login error | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
The account and password are correct, the login interface works properly, and an error message is displayed on the get_user_info interface
<img width="1918" alt="image" src="https://user-images.githubusercontent.com/76080484/157405884-912e6b71-fbf5-4108-92bf-c405fe4f288d.png">
### What you expected to happen
Login successful
### How to reproduce
Open the web page, enter the correct account password, and click login
### Anything else
No
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8772 | https://github.com/apache/dolphinscheduler/pull/8773 | 1d1c99b31f54ac2e5a7761242c9b20565a92a546 | 59410af0aeb3eeca6eed0395050c1159a8c4dc6c | "2022-03-09T08:52:43Z" | java | "2022-03-09T10:13:44Z" | dolphinscheduler-ui-next/src/layouts/content/components/user/use-dropdown.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { useRouter } from 'vue-router'
import { logout } from '@/service/modules/logout'
import { useUserStore } from '@/store/user/user'
import { useMenuStore } from '@/store/menu/menu'
import type { Router } from 'vue-router'
import { DropdownOption } from 'naive-ui'
export function useDropDown() {
  const router: Router = useRouter()
  const userStore = useUserStore()
  // NOTE: the previously declared `menuStore` local was unused and has
  // been removed; this composable only needs the router and user store.

  /**
   * Handles a selection in the user dropdown menu and routes (or logs
   * out) according to the chosen option key.
   *
   * @param key - key of the selected dropdown option
   * @param unused - the full option object; required by naive-ui's
   *   on-select callback signature but not needed here
   */
  const handleSelect = (key: string | number, unused: DropdownOption) => {
    if (key === 'logout') {
      useLogout()
    } else if (key === 'password') {
      router.push({ path: '/password' })
    } else if (key === 'profile') {
      router.push({ path: '/profile' })
    }
  }

  // Logs out on the server, clears the locally cached session id and
  // user info, then redirects to the login page.
  const useLogout = () => {
    logout().then(() => {
      userStore.setSessionId('')
      userStore.setUserInfo({})
      router.push({ path: '/login' })
    })
  }

  return {
    handleSelect
  }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,772 | [block-Bug][UI Next][V1.0.0-Alpha]Account login error | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
The account and password are correct, the login interface works properly, and an error message is displayed on the get_user_info interface
<img width="1918" alt="image" src="https://user-images.githubusercontent.com/76080484/157405884-912e6b71-fbf5-4108-92bf-c405fe4f288d.png">
### What you expected to happen
Login successful
### How to reproduce
Open the web page, enter the correct account password, and click login
### Anything else
No
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8772 | https://github.com/apache/dolphinscheduler/pull/8773 | 1d1c99b31f54ac2e5a7761242c9b20565a92a546 | 59410af0aeb3eeca6eed0395050c1159a8c4dc6c | "2022-03-09T08:52:43Z" | java | "2022-03-09T10:13:44Z" | dolphinscheduler-ui-next/src/views/login/use-login.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { useRouter } from 'vue-router'
import { login } from '@/service/modules/login'
import { getUserInfo } from '@/service/modules/users'
import { useUserStore } from '@/store/user/user'
import type { Router } from 'vue-router'
import type { SessionIdRes } from '@/service/modules/login/types'
import type { UserInfoRes } from '@/service/modules/users/types'
import { useMenuStore } from '@/store/menu/menu'
export function useLogin(state: any) {
  const router: Router = useRouter()
  const userStore = useUserStore()
  const menuStore = useMenuStore()

  /**
   * Validates the login form and, when valid, performs the login flow:
   * obtains a session id, loads the user's info, caches both in the
   * user store, then routes to the last visited menu entry (falling
   * back to the home page).
   */
  const handleLogin = () => {
    // naive-ui's validate callback receives the validation *errors* as
    // its first argument; it is undefined when the form is valid. The
    // parameter was previously misnamed `valid`, which made the
    // `if (!valid)` check read as inverted logic.
    state.loginFormRef.validate(async (errors: any) => {
      if (!errors) {
        const loginRes: SessionIdRes = await login({ ...state.loginForm })
        await userStore.setSessionId(loginRes.sessionId)

        const userInfoRes: UserInfoRes = await getUserInfo()
        await userStore.setUserInfo(userInfoRes)

        // Restore the route the user last navigated to, if any.
        const key = menuStore.getMenuKey
        router.push({ path: key || 'home' })
      }
    })
  }

  return {
    handleLogin
  }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,757 | [Bug][UI Next][V1.0.0-Alpha] There isn't the text button of the help document in the task form. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
In the task form there is no text button linking to the help document for this type of task. In the old UI, clicking that text button opened the help document.
The task form in the old UI seems like:
![image](https://user-images.githubusercontent.com/4928204/157206702-f3fcc62b-d601-4b0c-9a64-0eca84e3736d.png)
The task form in the new UI seems like:
![image](https://user-images.githubusercontent.com/4928204/157206713-5b58c180-cbc3-4629-a951-d37bd54707f6.png)
### What you expected to happen
I expect that I can use the text button to access the help document.
### How to reproduce
You can see the task form in the new UI.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8757 | https://github.com/apache/dolphinscheduler/pull/8775 | 59410af0aeb3eeca6eed0395050c1159a8c4dc6c | 27c5944c2d7111a731eb42d57097ba4a9def2fe9 | "2022-03-08T09:22:11Z" | java | "2022-03-09T10:15:07Z" | dolphinscheduler-ui-next/public/lodash.min.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function(){function n(n,t,r){switch(r.length){case 0:return n.call(t);case 1:return n.call(t,r[0]);case 2:return n.call(t,r[0],r[1]);case 3:return n.call(t,r[0],r[1],r[2])}return n.apply(t,r)}function t(n,t,r,e){for(var u=-1,i=null==n?0:n.length;++u<i;){var o=n[u];t(e,o,r(o),n)}return e}function r(n,t){for(var r=-1,e=null==n?0:n.length;++r<e&&t(n[r],r,n)!==!1;);return n}function e(n,t){for(var r=null==n?0:n.length;r--&&t(n[r],r,n)!==!1;);return n}function u(n,t){for(var r=-1,e=null==n?0:n.length;++r<e;)if(!t(n[r],r,n))return!1;
return!0}function i(n,t){for(var r=-1,e=null==n?0:n.length,u=0,i=[];++r<e;){var o=n[r];t(o,r,n)&&(i[u++]=o)}return i}function o(n,t){return!!(null==n?0:n.length)&&y(n,t,0)>-1}function f(n,t,r){for(var e=-1,u=null==n?0:n.length;++e<u;)if(r(t,n[e]))return!0;return!1}function c(n,t){for(var r=-1,e=null==n?0:n.length,u=Array(e);++r<e;)u[r]=t(n[r],r,n);return u}function a(n,t){for(var r=-1,e=t.length,u=n.length;++r<e;)n[u+r]=t[r];return n}function l(n,t,r,e){var u=-1,i=null==n?0:n.length;for(e&&i&&(r=n[++u]);++u<i;)r=t(r,n[u],u,n);
return r}function s(n,t,r,e){var u=null==n?0:n.length;for(e&&u&&(r=n[--u]);u--;)r=t(r,n[u],u,n);return r}function h(n,t){for(var r=-1,e=null==n?0:n.length;++r<e;)if(t(n[r],r,n))return!0;return!1}function p(n){return n.split("")}function _(n){return n.match($t)||[]}function v(n,t,r){var e;return r(n,function(n,r,u){if(t(n,r,u))return e=r,!1}),e}function g(n,t,r,e){for(var u=n.length,i=r+(e?1:-1);e?i--:++i<u;)if(t(n[i],i,n))return i;return-1}function y(n,t,r){return t===t?Z(n,t,r):g(n,b,r)}function d(n,t,r,e){
for(var u=r-1,i=n.length;++u<i;)if(e(n[u],t))return u;return-1}function b(n){return n!==n}function w(n,t){var r=null==n?0:n.length;return r?k(n,t)/r:Cn}function m(n){return function(t){return null==t?X:t[n]}}function x(n){return function(t){return null==n?X:n[t]}}function j(n,t,r,e,u){return u(n,function(n,u,i){r=e?(e=!1,n):t(r,n,u,i)}),r}function A(n,t){var r=n.length;for(n.sort(t);r--;)n[r]=n[r].value;return n}function k(n,t){for(var r,e=-1,u=n.length;++e<u;){var i=t(n[e]);i!==X&&(r=r===X?i:r+i);
}return r}function O(n,t){for(var r=-1,e=Array(n);++r<n;)e[r]=t(r);return e}function I(n,t){return c(t,function(t){return[t,n[t]]})}function R(n){return n?n.slice(0,H(n)+1).replace(Lt,""):n}function z(n){return function(t){return n(t)}}function E(n,t){return c(t,function(t){return n[t]})}function S(n,t){return n.has(t)}function W(n,t){for(var r=-1,e=n.length;++r<e&&y(t,n[r],0)>-1;);return r}function L(n,t){for(var r=n.length;r--&&y(t,n[r],0)>-1;);return r}function C(n,t){for(var r=n.length,e=0;r--;)n[r]===t&&++e;
return e}function U(n){return"\\"+Yr[n]}function B(n,t){return null==n?X:n[t]}function T(n){return Nr.test(n)}function $(n){return Pr.test(n)}function D(n){for(var t,r=[];!(t=n.next()).done;)r.push(t.value);return r}function M(n){var t=-1,r=Array(n.size);return n.forEach(function(n,e){r[++t]=[e,n]}),r}function F(n,t){return function(r){return n(t(r))}}function N(n,t){for(var r=-1,e=n.length,u=0,i=[];++r<e;){var o=n[r];o!==t&&o!==cn||(n[r]=cn,i[u++]=r)}return i}function P(n){var t=-1,r=Array(n.size);
return n.forEach(function(n){r[++t]=n}),r}function q(n){var t=-1,r=Array(n.size);return n.forEach(function(n){r[++t]=[n,n]}),r}function Z(n,t,r){for(var e=r-1,u=n.length;++e<u;)if(n[e]===t)return e;return-1}function K(n,t,r){for(var e=r+1;e--;)if(n[e]===t)return e;return e}function V(n){return T(n)?J(n):_e(n)}function G(n){return T(n)?Y(n):p(n)}function H(n){for(var t=n.length;t--&&Ct.test(n.charAt(t)););return t}function J(n){for(var t=Mr.lastIndex=0;Mr.test(n);)++t;return t}function Y(n){return n.match(Mr)||[];
}function Q(n){return n.match(Fr)||[]}var X,nn="4.17.21",tn=200,rn="Unsupported core-js use. Try https://npms.io/search?q=ponyfill.",en="Expected a function",un="Invalid `variable` option passed into `_.template`",on="__lodash_hash_undefined__",fn=500,cn="__lodash_placeholder__",an=1,ln=2,sn=4,hn=1,pn=2,_n=1,vn=2,gn=4,yn=8,dn=16,bn=32,wn=64,mn=128,xn=256,jn=512,An=30,kn="...",On=800,In=16,Rn=1,zn=2,En=3,Sn=1/0,Wn=9007199254740991,Ln=1.7976931348623157e308,Cn=NaN,Un=4294967295,Bn=Un-1,Tn=Un>>>1,$n=[["ary",mn],["bind",_n],["bindKey",vn],["curry",yn],["curryRight",dn],["flip",jn],["partial",bn],["partialRight",wn],["rearg",xn]],Dn="[object Arguments]",Mn="[object Array]",Fn="[object AsyncFunction]",Nn="[object Boolean]",Pn="[object Date]",qn="[object DOMException]",Zn="[object Error]",Kn="[object Function]",Vn="[object GeneratorFunction]",Gn="[object Map]",Hn="[object Number]",Jn="[object Null]",Yn="[object Object]",Qn="[object Promise]",Xn="[object Proxy]",nt="[object RegExp]",tt="[object Set]",rt="[object String]",et="[object Symbol]",ut="[object Undefined]",it="[object WeakMap]",ot="[object WeakSet]",ft="[object ArrayBuffer]",ct="[object DataView]",at="[object Float32Array]",lt="[object Float64Array]",st="[object Int8Array]",ht="[object Int16Array]",pt="[object Int32Array]",_t="[object Uint8Array]",vt="[object Uint8ClampedArray]",gt="[object Uint16Array]",yt="[object Uint32Array]",dt=/\b__p \+= '';/g,bt=/\b(__p \+=) '' \+/g,wt=/(__e\(.*?\)|\b__t\)) \+\n'';/g,mt=/&(?:amp|lt|gt|quot|#39);/g,xt=/[&<>"']/g,jt=RegExp(mt.source),At=RegExp(xt.source),kt=/<%-([\s\S]+?)%>/g,Ot=/<%([\s\S]+?)%>/g,It=/<%=([\s\S]+?)%>/g,Rt=/\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/,zt=/^\w*$/,Et=/[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g,St=/[\\^$.*+?()[\]{}|]/g,Wt=RegExp(St.source),Lt=/^\s+/,Ct=/\s/,Ut=/\{(?:\n\/\* \[wrapped with .+\] \*\/)?\n?/,Bt=/\{\n\/\* \[wrapped with (.+)\] \*/,Tt=/,? 
& /,$t=/[^\x00-\x2f\x3a-\x40\x5b-\x60\x7b-\x7f]+/g,Dt=/[()=,{}\[\]\/\s]/,Mt=/\\(\\)?/g,Ft=/\$\{([^\\}]*(?:\\.[^\\}]*)*)\}/g,Nt=/\w*$/,Pt=/^[-+]0x[0-9a-f]+$/i,qt=/^0b[01]+$/i,Zt=/^\[object .+?Constructor\]$/,Kt=/^0o[0-7]+$/i,Vt=/^(?:0|[1-9]\d*)$/,Gt=/[\xc0-\xd6\xd8-\xf6\xf8-\xff\u0100-\u017f]/g,Ht=/($^)/,Jt=/['\n\r\u2028\u2029\\]/g,Yt="\\ud800-\\udfff",Qt="\\u0300-\\u036f",Xt="\\ufe20-\\ufe2f",nr="\\u20d0-\\u20ff",tr=Qt+Xt+nr,rr="\\u2700-\\u27bf",er="a-z\\xdf-\\xf6\\xf8-\\xff",ur="\\xac\\xb1\\xd7\\xf7",ir="\\x00-\\x2f\\x3a-\\x40\\x5b-\\x60\\x7b-\\xbf",or="\\u2000-\\u206f",fr=" \\t\\x0b\\f\\xa0\\ufeff\\n\\r\\u2028\\u2029\\u1680\\u180e\\u2000\\u2001\\u2002\\u2003\\u2004\\u2005\\u2006\\u2007\\u2008\\u2009\\u200a\\u202f\\u205f\\u3000",cr="A-Z\\xc0-\\xd6\\xd8-\\xde",ar="\\ufe0e\\ufe0f",lr=ur+ir+or+fr,sr="['\u2019]",hr="["+Yt+"]",pr="["+lr+"]",_r="["+tr+"]",vr="\\d+",gr="["+rr+"]",yr="["+er+"]",dr="[^"+Yt+lr+vr+rr+er+cr+"]",br="\\ud83c[\\udffb-\\udfff]",wr="(?:"+_r+"|"+br+")",mr="[^"+Yt+"]",xr="(?:\\ud83c[\\udde6-\\uddff]){2}",jr="[\\ud800-\\udbff][\\udc00-\\udfff]",Ar="["+cr+"]",kr="\\u200d",Or="(?:"+yr+"|"+dr+")",Ir="(?:"+Ar+"|"+dr+")",Rr="(?:"+sr+"(?:d|ll|m|re|s|t|ve))?",zr="(?:"+sr+"(?:D|LL|M|RE|S|T|VE))?",Er=wr+"?",Sr="["+ar+"]?",Wr="(?:"+kr+"(?:"+[mr,xr,jr].join("|")+")"+Sr+Er+")*",Lr="\\d*(?:1st|2nd|3rd|(?![123])\\dth)(?=\\b|[A-Z_])",Cr="\\d*(?:1ST|2ND|3RD|(?![123])\\dTH)(?=\\b|[a-z_])",Ur=Sr+Er+Wr,Br="(?:"+[gr,xr,jr].join("|")+")"+Ur,Tr="(?:"+[mr+_r+"?",_r,xr,jr,hr].join("|")+")",$r=RegExp(sr,"g"),Dr=RegExp(_r,"g"),Mr=RegExp(br+"(?="+br+")|"+Tr+Ur,"g"),Fr=RegExp([Ar+"?"+yr+"+"+Rr+"(?="+[pr,Ar,"$"].join("|")+")",Ir+"+"+zr+"(?="+[pr,Ar+Or,"$"].join("|")+")",Ar+"?"+Or+"+"+Rr,Ar+"+"+zr,Cr,Lr,vr,Br].join("|"),"g"),Nr=RegExp("["+kr+Yt+tr+ar+"]"),Pr=/[a-z][A-Z]|[A-Z]{2}[a-z]|[0-9][a-zA-Z]|[a-zA-Z][0-9]|[^a-zA-Z0-9 
]/,qr=["Array","Buffer","DataView","Date","Error","Float32Array","Float64Array","Function","Int8Array","Int16Array","Int32Array","Map","Math","Object","Promise","RegExp","Set","String","Symbol","TypeError","Uint8Array","Uint8ClampedArray","Uint16Array","Uint32Array","WeakMap","_","clearTimeout","isFinite","parseInt","setTimeout"],Zr=-1,Kr={};
Kr[at]=Kr[lt]=Kr[st]=Kr[ht]=Kr[pt]=Kr[_t]=Kr[vt]=Kr[gt]=Kr[yt]=!0,Kr[Dn]=Kr[Mn]=Kr[ft]=Kr[Nn]=Kr[ct]=Kr[Pn]=Kr[Zn]=Kr[Kn]=Kr[Gn]=Kr[Hn]=Kr[Yn]=Kr[nt]=Kr[tt]=Kr[rt]=Kr[it]=!1;var Vr={};Vr[Dn]=Vr[Mn]=Vr[ft]=Vr[ct]=Vr[Nn]=Vr[Pn]=Vr[at]=Vr[lt]=Vr[st]=Vr[ht]=Vr[pt]=Vr[Gn]=Vr[Hn]=Vr[Yn]=Vr[nt]=Vr[tt]=Vr[rt]=Vr[et]=Vr[_t]=Vr[vt]=Vr[gt]=Vr[yt]=!0,Vr[Zn]=Vr[Kn]=Vr[it]=!1;var Gr={"\xc0":"A","\xc1":"A","\xc2":"A","\xc3":"A","\xc4":"A","\xc5":"A","\xe0":"a","\xe1":"a","\xe2":"a","\xe3":"a","\xe4":"a","\xe5":"a",
"\xc7":"C","\xe7":"c","\xd0":"D","\xf0":"d","\xc8":"E","\xc9":"E","\xca":"E","\xcb":"E","\xe8":"e","\xe9":"e","\xea":"e","\xeb":"e","\xcc":"I","\xcd":"I","\xce":"I","\xcf":"I","\xec":"i","\xed":"i","\xee":"i","\xef":"i","\xd1":"N","\xf1":"n","\xd2":"O","\xd3":"O","\xd4":"O","\xd5":"O","\xd6":"O","\xd8":"O","\xf2":"o","\xf3":"o","\xf4":"o","\xf5":"o","\xf6":"o","\xf8":"o","\xd9":"U","\xda":"U","\xdb":"U","\xdc":"U","\xf9":"u","\xfa":"u","\xfb":"u","\xfc":"u","\xdd":"Y","\xfd":"y","\xff":"y","\xc6":"Ae",
"\xe6":"ae","\xde":"Th","\xfe":"th","\xdf":"ss","\u0100":"A","\u0102":"A","\u0104":"A","\u0101":"a","\u0103":"a","\u0105":"a","\u0106":"C","\u0108":"C","\u010a":"C","\u010c":"C","\u0107":"c","\u0109":"c","\u010b":"c","\u010d":"c","\u010e":"D","\u0110":"D","\u010f":"d","\u0111":"d","\u0112":"E","\u0114":"E","\u0116":"E","\u0118":"E","\u011a":"E","\u0113":"e","\u0115":"e","\u0117":"e","\u0119":"e","\u011b":"e","\u011c":"G","\u011e":"G","\u0120":"G","\u0122":"G","\u011d":"g","\u011f":"g","\u0121":"g",
"\u0123":"g","\u0124":"H","\u0126":"H","\u0125":"h","\u0127":"h","\u0128":"I","\u012a":"I","\u012c":"I","\u012e":"I","\u0130":"I","\u0129":"i","\u012b":"i","\u012d":"i","\u012f":"i","\u0131":"i","\u0134":"J","\u0135":"j","\u0136":"K","\u0137":"k","\u0138":"k","\u0139":"L","\u013b":"L","\u013d":"L","\u013f":"L","\u0141":"L","\u013a":"l","\u013c":"l","\u013e":"l","\u0140":"l","\u0142":"l","\u0143":"N","\u0145":"N","\u0147":"N","\u014a":"N","\u0144":"n","\u0146":"n","\u0148":"n","\u014b":"n","\u014c":"O",
"\u014e":"O","\u0150":"O","\u014d":"o","\u014f":"o","\u0151":"o","\u0154":"R","\u0156":"R","\u0158":"R","\u0155":"r","\u0157":"r","\u0159":"r","\u015a":"S","\u015c":"S","\u015e":"S","\u0160":"S","\u015b":"s","\u015d":"s","\u015f":"s","\u0161":"s","\u0162":"T","\u0164":"T","\u0166":"T","\u0163":"t","\u0165":"t","\u0167":"t","\u0168":"U","\u016a":"U","\u016c":"U","\u016e":"U","\u0170":"U","\u0172":"U","\u0169":"u","\u016b":"u","\u016d":"u","\u016f":"u","\u0171":"u","\u0173":"u","\u0174":"W","\u0175":"w",
"\u0176":"Y","\u0177":"y","\u0178":"Y","\u0179":"Z","\u017b":"Z","\u017d":"Z","\u017a":"z","\u017c":"z","\u017e":"z","\u0132":"IJ","\u0133":"ij","\u0152":"Oe","\u0153":"oe","\u0149":"'n","\u017f":"s"},Hr={"&":"&","<":"<",">":">",'"':""","'":"'"},Jr={"&":"&","<":"<",">":">",""":'"',"'":"'"},Yr={"\\":"\\","'":"'","\n":"n","\r":"r","\u2028":"u2028","\u2029":"u2029"},Qr=parseFloat,Xr=parseInt,ne="object"==typeof global&&global&&global.Object===Object&&global,te="object"==typeof self&&self&&self.Object===Object&&self,re=ne||te||Function("return this")(),ee="object"==typeof exports&&exports&&!exports.nodeType&&exports,ue=ee&&"object"==typeof module&&module&&!module.nodeType&&module,ie=ue&&ue.exports===ee,oe=ie&&ne.process,fe=function(){
try{var n=ue&&ue.require&&ue.require("util").types;return n?n:oe&&oe.binding&&oe.binding("util")}catch(n){}}(),ce=fe&&fe.isArrayBuffer,ae=fe&&fe.isDate,le=fe&&fe.isMap,se=fe&&fe.isRegExp,he=fe&&fe.isSet,pe=fe&&fe.isTypedArray,_e=m("length"),ve=x(Gr),ge=x(Hr),ye=x(Jr),de=function p(x){function Z(n){if(cc(n)&&!bh(n)&&!(n instanceof Ct)){if(n instanceof Y)return n;if(bl.call(n,"__wrapped__"))return eo(n)}return new Y(n)}function J(){}function Y(n,t){this.__wrapped__=n,this.__actions__=[],this.__chain__=!!t,
this.__index__=0,this.__values__=X}function Ct(n){this.__wrapped__=n,this.__actions__=[],this.__dir__=1,this.__filtered__=!1,this.__iteratees__=[],this.__takeCount__=Un,this.__views__=[]}function $t(){var n=new Ct(this.__wrapped__);return n.__actions__=Tu(this.__actions__),n.__dir__=this.__dir__,n.__filtered__=this.__filtered__,n.__iteratees__=Tu(this.__iteratees__),n.__takeCount__=this.__takeCount__,n.__views__=Tu(this.__views__),n}function Yt(){if(this.__filtered__){var n=new Ct(this);n.__dir__=-1,
n.__filtered__=!0}else n=this.clone(),n.__dir__*=-1;return n}function Qt(){var n=this.__wrapped__.value(),t=this.__dir__,r=bh(n),e=t<0,u=r?n.length:0,i=Oi(0,u,this.__views__),o=i.start,f=i.end,c=f-o,a=e?f:o-1,l=this.__iteratees__,s=l.length,h=0,p=Hl(c,this.__takeCount__);if(!r||!e&&u==c&&p==c)return wu(n,this.__actions__);var _=[];n:for(;c--&&h<p;){a+=t;for(var v=-1,g=n[a];++v<s;){var y=l[v],d=y.iteratee,b=y.type,w=d(g);if(b==zn)g=w;else if(!w){if(b==Rn)continue n;break n}}_[h++]=g}return _}function Xt(n){
var t=-1,r=null==n?0:n.length;for(this.clear();++t<r;){var e=n[t];this.set(e[0],e[1])}}function nr(){this.__data__=is?is(null):{},this.size=0}function tr(n){var t=this.has(n)&&delete this.__data__[n];return this.size-=t?1:0,t}function rr(n){var t=this.__data__;if(is){var r=t[n];return r===on?X:r}return bl.call(t,n)?t[n]:X}function er(n){var t=this.__data__;return is?t[n]!==X:bl.call(t,n)}function ur(n,t){var r=this.__data__;return this.size+=this.has(n)?0:1,r[n]=is&&t===X?on:t,this}function ir(n){
var t=-1,r=null==n?0:n.length;for(this.clear();++t<r;){var e=n[t];this.set(e[0],e[1])}}function or(){this.__data__=[],this.size=0}function fr(n){var t=this.__data__,r=Wr(t,n);return!(r<0)&&(r==t.length-1?t.pop():Ll.call(t,r,1),--this.size,!0)}function cr(n){var t=this.__data__,r=Wr(t,n);return r<0?X:t[r][1]}function ar(n){return Wr(this.__data__,n)>-1}function lr(n,t){var r=this.__data__,e=Wr(r,n);return e<0?(++this.size,r.push([n,t])):r[e][1]=t,this}function sr(n){var t=-1,r=null==n?0:n.length;for(this.clear();++t<r;){
var e=n[t];this.set(e[0],e[1])}}function hr(){this.size=0,this.__data__={hash:new Xt,map:new(ts||ir),string:new Xt}}function pr(n){var t=xi(this,n).delete(n);return this.size-=t?1:0,t}function _r(n){return xi(this,n).get(n)}function vr(n){return xi(this,n).has(n)}function gr(n,t){var r=xi(this,n),e=r.size;return r.set(n,t),this.size+=r.size==e?0:1,this}function yr(n){var t=-1,r=null==n?0:n.length;for(this.__data__=new sr;++t<r;)this.add(n[t])}function dr(n){return this.__data__.set(n,on),this}function br(n){
return this.__data__.has(n)}function wr(n){this.size=(this.__data__=new ir(n)).size}function mr(){this.__data__=new ir,this.size=0}function xr(n){var t=this.__data__,r=t.delete(n);return this.size=t.size,r}function jr(n){return this.__data__.get(n)}function Ar(n){return this.__data__.has(n)}function kr(n,t){var r=this.__data__;if(r instanceof ir){var e=r.__data__;if(!ts||e.length<tn-1)return e.push([n,t]),this.size=++r.size,this;r=this.__data__=new sr(e)}return r.set(n,t),this.size=r.size,this}function Or(n,t){
var r=bh(n),e=!r&&dh(n),u=!r&&!e&&mh(n),i=!r&&!e&&!u&&Oh(n),o=r||e||u||i,f=o?O(n.length,hl):[],c=f.length;for(var a in n)!t&&!bl.call(n,a)||o&&("length"==a||u&&("offset"==a||"parent"==a)||i&&("buffer"==a||"byteLength"==a||"byteOffset"==a)||Ci(a,c))||f.push(a);return f}function Ir(n){var t=n.length;return t?n[tu(0,t-1)]:X}function Rr(n,t){return Xi(Tu(n),Mr(t,0,n.length))}function zr(n){return Xi(Tu(n))}function Er(n,t,r){(r===X||Gf(n[t],r))&&(r!==X||t in n)||Br(n,t,r)}function Sr(n,t,r){var e=n[t];
bl.call(n,t)&&Gf(e,r)&&(r!==X||t in n)||Br(n,t,r)}function Wr(n,t){for(var r=n.length;r--;)if(Gf(n[r][0],t))return r;return-1}function Lr(n,t,r,e){return ys(n,function(n,u,i){t(e,n,r(n),i)}),e}function Cr(n,t){return n&&$u(t,Pc(t),n)}function Ur(n,t){return n&&$u(t,qc(t),n)}function Br(n,t,r){"__proto__"==t&&Tl?Tl(n,t,{configurable:!0,enumerable:!0,value:r,writable:!0}):n[t]=r}function Tr(n,t){for(var r=-1,e=t.length,u=il(e),i=null==n;++r<e;)u[r]=i?X:Mc(n,t[r]);return u}function Mr(n,t,r){return n===n&&(r!==X&&(n=n<=r?n:r),
t!==X&&(n=n>=t?n:t)),n}function Fr(n,t,e,u,i,o){var f,c=t&an,a=t&ln,l=t&sn;if(e&&(f=i?e(n,u,i,o):e(n)),f!==X)return f;if(!fc(n))return n;var s=bh(n);if(s){if(f=zi(n),!c)return Tu(n,f)}else{var h=zs(n),p=h==Kn||h==Vn;if(mh(n))return Iu(n,c);if(h==Yn||h==Dn||p&&!i){if(f=a||p?{}:Ei(n),!c)return a?Mu(n,Ur(f,n)):Du(n,Cr(f,n))}else{if(!Vr[h])return i?n:{};f=Si(n,h,c)}}o||(o=new wr);var _=o.get(n);if(_)return _;o.set(n,f),kh(n)?n.forEach(function(r){f.add(Fr(r,t,e,r,n,o))}):jh(n)&&n.forEach(function(r,u){
f.set(u,Fr(r,t,e,u,n,o))});var v=l?a?di:yi:a?qc:Pc,g=s?X:v(n);return r(g||n,function(r,u){g&&(u=r,r=n[u]),Sr(f,u,Fr(r,t,e,u,n,o))}),f}function Nr(n){var t=Pc(n);return function(r){return Pr(r,n,t)}}function Pr(n,t,r){var e=r.length;if(null==n)return!e;for(n=ll(n);e--;){var u=r[e],i=t[u],o=n[u];if(o===X&&!(u in n)||!i(o))return!1}return!0}function Gr(n,t,r){if("function"!=typeof n)throw new pl(en);return Ws(function(){n.apply(X,r)},t)}function Hr(n,t,r,e){var u=-1,i=o,a=!0,l=n.length,s=[],h=t.length;
if(!l)return s;r&&(t=c(t,z(r))),e?(i=f,a=!1):t.length>=tn&&(i=S,a=!1,t=new yr(t));n:for(;++u<l;){var p=n[u],_=null==r?p:r(p);if(p=e||0!==p?p:0,a&&_===_){for(var v=h;v--;)if(t[v]===_)continue n;s.push(p)}else i(t,_,e)||s.push(p)}return s}function Jr(n,t){var r=!0;return ys(n,function(n,e,u){return r=!!t(n,e,u)}),r}function Yr(n,t,r){for(var e=-1,u=n.length;++e<u;){var i=n[e],o=t(i);if(null!=o&&(f===X?o===o&&!bc(o):r(o,f)))var f=o,c=i}return c}function ne(n,t,r,e){var u=n.length;for(r=kc(r),r<0&&(r=-r>u?0:u+r),
e=e===X||e>u?u:kc(e),e<0&&(e+=u),e=r>e?0:Oc(e);r<e;)n[r++]=t;return n}function te(n,t){var r=[];return ys(n,function(n,e,u){t(n,e,u)&&r.push(n)}),r}function ee(n,t,r,e,u){var i=-1,o=n.length;for(r||(r=Li),u||(u=[]);++i<o;){var f=n[i];t>0&&r(f)?t>1?ee(f,t-1,r,e,u):a(u,f):e||(u[u.length]=f)}return u}function ue(n,t){return n&&bs(n,t,Pc)}function oe(n,t){return n&&ws(n,t,Pc)}function fe(n,t){return i(t,function(t){return uc(n[t])})}function _e(n,t){t=ku(t,n);for(var r=0,e=t.length;null!=n&&r<e;)n=n[no(t[r++])];
return r&&r==e?n:X}function de(n,t,r){var e=t(n);return bh(n)?e:a(e,r(n))}function we(n){return null==n?n===X?ut:Jn:Bl&&Bl in ll(n)?ki(n):Ki(n)}function me(n,t){return n>t}function xe(n,t){return null!=n&&bl.call(n,t)}function je(n,t){return null!=n&&t in ll(n)}function Ae(n,t,r){return n>=Hl(t,r)&&n<Gl(t,r)}function ke(n,t,r){for(var e=r?f:o,u=n[0].length,i=n.length,a=i,l=il(i),s=1/0,h=[];a--;){var p=n[a];a&&t&&(p=c(p,z(t))),s=Hl(p.length,s),l[a]=!r&&(t||u>=120&&p.length>=120)?new yr(a&&p):X}p=n[0];
var _=-1,v=l[0];n:for(;++_<u&&h.length<s;){var g=p[_],y=t?t(g):g;if(g=r||0!==g?g:0,!(v?S(v,y):e(h,y,r))){for(a=i;--a;){var d=l[a];if(!(d?S(d,y):e(n[a],y,r)))continue n}v&&v.push(y),h.push(g)}}return h}function Oe(n,t,r,e){return ue(n,function(n,u,i){t(e,r(n),u,i)}),e}function Ie(t,r,e){r=ku(r,t),t=Gi(t,r);var u=null==t?t:t[no(jo(r))];return null==u?X:n(u,t,e)}function Re(n){return cc(n)&&we(n)==Dn}function ze(n){return cc(n)&&we(n)==ft}function Ee(n){return cc(n)&&we(n)==Pn}function Se(n,t,r,e,u){
return n===t||(null==n||null==t||!cc(n)&&!cc(t)?n!==n&&t!==t:We(n,t,r,e,Se,u))}function We(n,t,r,e,u,i){var o=bh(n),f=bh(t),c=o?Mn:zs(n),a=f?Mn:zs(t);c=c==Dn?Yn:c,a=a==Dn?Yn:a;var l=c==Yn,s=a==Yn,h=c==a;if(h&&mh(n)){if(!mh(t))return!1;o=!0,l=!1}if(h&&!l)return i||(i=new wr),o||Oh(n)?pi(n,t,r,e,u,i):_i(n,t,c,r,e,u,i);if(!(r&hn)){var p=l&&bl.call(n,"__wrapped__"),_=s&&bl.call(t,"__wrapped__");if(p||_){var v=p?n.value():n,g=_?t.value():t;return i||(i=new wr),u(v,g,r,e,i)}}return!!h&&(i||(i=new wr),vi(n,t,r,e,u,i));
}function Le(n){return cc(n)&&zs(n)==Gn}function Ce(n,t,r,e){var u=r.length,i=u,o=!e;if(null==n)return!i;for(n=ll(n);u--;){var f=r[u];if(o&&f[2]?f[1]!==n[f[0]]:!(f[0]in n))return!1}for(;++u<i;){f=r[u];var c=f[0],a=n[c],l=f[1];if(o&&f[2]){if(a===X&&!(c in n))return!1}else{var s=new wr;if(e)var h=e(a,l,c,n,t,s);if(!(h===X?Se(l,a,hn|pn,e,s):h))return!1}}return!0}function Ue(n){return!(!fc(n)||Di(n))&&(uc(n)?kl:Zt).test(to(n))}function Be(n){return cc(n)&&we(n)==nt}function Te(n){return cc(n)&&zs(n)==tt;
}function $e(n){return cc(n)&&oc(n.length)&&!!Kr[we(n)]}function De(n){return"function"==typeof n?n:null==n?La:"object"==typeof n?bh(n)?Ze(n[0],n[1]):qe(n):Fa(n)}function Me(n){if(!Mi(n))return Vl(n);var t=[];for(var r in ll(n))bl.call(n,r)&&"constructor"!=r&&t.push(r);return t}function Fe(n){if(!fc(n))return Zi(n);var t=Mi(n),r=[];for(var e in n)("constructor"!=e||!t&&bl.call(n,e))&&r.push(e);return r}function Ne(n,t){return n<t}function Pe(n,t){var r=-1,e=Hf(n)?il(n.length):[];return ys(n,function(n,u,i){
e[++r]=t(n,u,i)}),e}function qe(n){var t=ji(n);return 1==t.length&&t[0][2]?Ni(t[0][0],t[0][1]):function(r){return r===n||Ce(r,n,t)}}function Ze(n,t){return Bi(n)&&Fi(t)?Ni(no(n),t):function(r){var e=Mc(r,n);return e===X&&e===t?Nc(r,n):Se(t,e,hn|pn)}}function Ke(n,t,r,e,u){n!==t&&bs(t,function(i,o){if(u||(u=new wr),fc(i))Ve(n,t,o,r,Ke,e,u);else{var f=e?e(Ji(n,o),i,o+"",n,t,u):X;f===X&&(f=i),Er(n,o,f)}},qc)}function Ve(n,t,r,e,u,i,o){var f=Ji(n,r),c=Ji(t,r),a=o.get(c);if(a)return Er(n,r,a),X;var l=i?i(f,c,r+"",n,t,o):X,s=l===X;
if(s){var h=bh(c),p=!h&&mh(c),_=!h&&!p&&Oh(c);l=c,h||p||_?bh(f)?l=f:Jf(f)?l=Tu(f):p?(s=!1,l=Iu(c,!0)):_?(s=!1,l=Wu(c,!0)):l=[]:gc(c)||dh(c)?(l=f,dh(f)?l=Rc(f):fc(f)&&!uc(f)||(l=Ei(c))):s=!1}s&&(o.set(c,l),u(l,c,e,i,o),o.delete(c)),Er(n,r,l)}function Ge(n,t){var r=n.length;if(r)return t+=t<0?r:0,Ci(t,r)?n[t]:X}function He(n,t,r){t=t.length?c(t,function(n){return bh(n)?function(t){return _e(t,1===n.length?n[0]:n)}:n}):[La];var e=-1;return t=c(t,z(mi())),A(Pe(n,function(n,r,u){return{criteria:c(t,function(t){
return t(n)}),index:++e,value:n}}),function(n,t){return Cu(n,t,r)})}function Je(n,t){return Ye(n,t,function(t,r){return Nc(n,r)})}function Ye(n,t,r){for(var e=-1,u=t.length,i={};++e<u;){var o=t[e],f=_e(n,o);r(f,o)&&fu(i,ku(o,n),f)}return i}function Qe(n){return function(t){return _e(t,n)}}function Xe(n,t,r,e){var u=e?d:y,i=-1,o=t.length,f=n;for(n===t&&(t=Tu(t)),r&&(f=c(n,z(r)));++i<o;)for(var a=0,l=t[i],s=r?r(l):l;(a=u(f,s,a,e))>-1;)f!==n&&Ll.call(f,a,1),Ll.call(n,a,1);return n}function nu(n,t){for(var r=n?t.length:0,e=r-1;r--;){
var u=t[r];if(r==e||u!==i){var i=u;Ci(u)?Ll.call(n,u,1):yu(n,u)}}return n}function tu(n,t){return n+Nl(Ql()*(t-n+1))}function ru(n,t,r,e){for(var u=-1,i=Gl(Fl((t-n)/(r||1)),0),o=il(i);i--;)o[e?i:++u]=n,n+=r;return o}function eu(n,t){var r="";if(!n||t<1||t>Wn)return r;do t%2&&(r+=n),t=Nl(t/2),t&&(n+=n);while(t);return r}function uu(n,t){return Ls(Vi(n,t,La),n+"")}function iu(n){return Ir(ra(n))}function ou(n,t){var r=ra(n);return Xi(r,Mr(t,0,r.length))}function fu(n,t,r,e){if(!fc(n))return n;t=ku(t,n);
for(var u=-1,i=t.length,o=i-1,f=n;null!=f&&++u<i;){var c=no(t[u]),a=r;if("__proto__"===c||"constructor"===c||"prototype"===c)return n;if(u!=o){var l=f[c];a=e?e(l,c,f):X,a===X&&(a=fc(l)?l:Ci(t[u+1])?[]:{})}Sr(f,c,a),f=f[c]}return n}function cu(n){return Xi(ra(n))}function au(n,t,r){var e=-1,u=n.length;t<0&&(t=-t>u?0:u+t),r=r>u?u:r,r<0&&(r+=u),u=t>r?0:r-t>>>0,t>>>=0;for(var i=il(u);++e<u;)i[e]=n[e+t];return i}function lu(n,t){var r;return ys(n,function(n,e,u){return r=t(n,e,u),!r}),!!r}function su(n,t,r){
var e=0,u=null==n?e:n.length;if("number"==typeof t&&t===t&&u<=Tn){for(;e<u;){var i=e+u>>>1,o=n[i];null!==o&&!bc(o)&&(r?o<=t:o<t)?e=i+1:u=i}return u}return hu(n,t,La,r)}function hu(n,t,r,e){var u=0,i=null==n?0:n.length;if(0===i)return 0;t=r(t);for(var o=t!==t,f=null===t,c=bc(t),a=t===X;u<i;){var l=Nl((u+i)/2),s=r(n[l]),h=s!==X,p=null===s,_=s===s,v=bc(s);if(o)var g=e||_;else g=a?_&&(e||h):f?_&&h&&(e||!p):c?_&&h&&!p&&(e||!v):!p&&!v&&(e?s<=t:s<t);g?u=l+1:i=l}return Hl(i,Bn)}function pu(n,t){for(var r=-1,e=n.length,u=0,i=[];++r<e;){
var o=n[r],f=t?t(o):o;if(!r||!Gf(f,c)){var c=f;i[u++]=0===o?0:o}}return i}function _u(n){return"number"==typeof n?n:bc(n)?Cn:+n}function vu(n){if("string"==typeof n)return n;if(bh(n))return c(n,vu)+"";if(bc(n))return vs?vs.call(n):"";var t=n+"";return"0"==t&&1/n==-Sn?"-0":t}function gu(n,t,r){var e=-1,u=o,i=n.length,c=!0,a=[],l=a;if(r)c=!1,u=f;else if(i>=tn){var s=t?null:ks(n);if(s)return P(s);c=!1,u=S,l=new yr}else l=t?[]:a;n:for(;++e<i;){var h=n[e],p=t?t(h):h;if(h=r||0!==h?h:0,c&&p===p){for(var _=l.length;_--;)if(l[_]===p)continue n;
t&&l.push(p),a.push(h)}else u(l,p,r)||(l!==a&&l.push(p),a.push(h))}return a}function yu(n,t){return t=ku(t,n),n=Gi(n,t),null==n||delete n[no(jo(t))]}function du(n,t,r,e){return fu(n,t,r(_e(n,t)),e)}function bu(n,t,r,e){for(var u=n.length,i=e?u:-1;(e?i--:++i<u)&&t(n[i],i,n););return r?au(n,e?0:i,e?i+1:u):au(n,e?i+1:0,e?u:i)}function wu(n,t){var r=n;return r instanceof Ct&&(r=r.value()),l(t,function(n,t){return t.func.apply(t.thisArg,a([n],t.args))},r)}function mu(n,t,r){var e=n.length;if(e<2)return e?gu(n[0]):[];
for(var u=-1,i=il(e);++u<e;)for(var o=n[u],f=-1;++f<e;)f!=u&&(i[u]=Hr(i[u]||o,n[f],t,r));return gu(ee(i,1),t,r)}function xu(n,t,r){for(var e=-1,u=n.length,i=t.length,o={};++e<u;){r(o,n[e],e<i?t[e]:X)}return o}function ju(n){return Jf(n)?n:[]}function Au(n){return"function"==typeof n?n:La}function ku(n,t){return bh(n)?n:Bi(n,t)?[n]:Cs(Ec(n))}function Ou(n,t,r){var e=n.length;return r=r===X?e:r,!t&&r>=e?n:au(n,t,r)}function Iu(n,t){if(t)return n.slice();var r=n.length,e=zl?zl(r):new n.constructor(r);
return n.copy(e),e}function Ru(n){var t=new n.constructor(n.byteLength);return new Rl(t).set(new Rl(n)),t}function zu(n,t){return new n.constructor(t?Ru(n.buffer):n.buffer,n.byteOffset,n.byteLength)}function Eu(n){var t=new n.constructor(n.source,Nt.exec(n));return t.lastIndex=n.lastIndex,t}function Su(n){return _s?ll(_s.call(n)):{}}function Wu(n,t){return new n.constructor(t?Ru(n.buffer):n.buffer,n.byteOffset,n.length)}function Lu(n,t){if(n!==t){var r=n!==X,e=null===n,u=n===n,i=bc(n),o=t!==X,f=null===t,c=t===t,a=bc(t);
if(!f&&!a&&!i&&n>t||i&&o&&c&&!f&&!a||e&&o&&c||!r&&c||!u)return 1;if(!e&&!i&&!a&&n<t||a&&r&&u&&!e&&!i||f&&r&&u||!o&&u||!c)return-1}return 0}function Cu(n,t,r){for(var e=-1,u=n.criteria,i=t.criteria,o=u.length,f=r.length;++e<o;){var c=Lu(u[e],i[e]);if(c){if(e>=f)return c;return c*("desc"==r[e]?-1:1)}}return n.index-t.index}function Uu(n,t,r,e){for(var u=-1,i=n.length,o=r.length,f=-1,c=t.length,a=Gl(i-o,0),l=il(c+a),s=!e;++f<c;)l[f]=t[f];for(;++u<o;)(s||u<i)&&(l[r[u]]=n[u]);for(;a--;)l[f++]=n[u++];return l;
}function Bu(n,t,r,e){for(var u=-1,i=n.length,o=-1,f=r.length,c=-1,a=t.length,l=Gl(i-f,0),s=il(l+a),h=!e;++u<l;)s[u]=n[u];for(var p=u;++c<a;)s[p+c]=t[c];for(;++o<f;)(h||u<i)&&(s[p+r[o]]=n[u++]);return s}function Tu(n,t){var r=-1,e=n.length;for(t||(t=il(e));++r<e;)t[r]=n[r];return t}function $u(n,t,r,e){var u=!r;r||(r={});for(var i=-1,o=t.length;++i<o;){var f=t[i],c=e?e(r[f],n[f],f,r,n):X;c===X&&(c=n[f]),u?Br(r,f,c):Sr(r,f,c)}return r}function Du(n,t){return $u(n,Is(n),t)}function Mu(n,t){return $u(n,Rs(n),t);
}function Fu(n,r){return function(e,u){var i=bh(e)?t:Lr,o=r?r():{};return i(e,n,mi(u,2),o)}}function Nu(n){return uu(function(t,r){var e=-1,u=r.length,i=u>1?r[u-1]:X,o=u>2?r[2]:X;for(i=n.length>3&&"function"==typeof i?(u--,i):X,o&&Ui(r[0],r[1],o)&&(i=u<3?X:i,u=1),t=ll(t);++e<u;){var f=r[e];f&&n(t,f,e,i)}return t})}function Pu(n,t){return function(r,e){if(null==r)return r;if(!Hf(r))return n(r,e);for(var u=r.length,i=t?u:-1,o=ll(r);(t?i--:++i<u)&&e(o[i],i,o)!==!1;);return r}}function qu(n){return function(t,r,e){
for(var u=-1,i=ll(t),o=e(t),f=o.length;f--;){var c=o[n?f:++u];if(r(i[c],c,i)===!1)break}return t}}function Zu(n,t,r){function e(){return(this&&this!==re&&this instanceof e?i:n).apply(u?r:this,arguments)}var u=t&_n,i=Gu(n);return e}function Ku(n){return function(t){t=Ec(t);var r=T(t)?G(t):X,e=r?r[0]:t.charAt(0),u=r?Ou(r,1).join(""):t.slice(1);return e[n]()+u}}function Vu(n){return function(t){return l(Ra(ca(t).replace($r,"")),n,"")}}function Gu(n){return function(){var t=arguments;switch(t.length){
case 0:return new n;case 1:return new n(t[0]);case 2:return new n(t[0],t[1]);case 3:return new n(t[0],t[1],t[2]);case 4:return new n(t[0],t[1],t[2],t[3]);case 5:return new n(t[0],t[1],t[2],t[3],t[4]);case 6:return new n(t[0],t[1],t[2],t[3],t[4],t[5]);case 7:return new n(t[0],t[1],t[2],t[3],t[4],t[5],t[6])}var r=gs(n.prototype),e=n.apply(r,t);return fc(e)?e:r}}function Hu(t,r,e){function u(){for(var o=arguments.length,f=il(o),c=o,a=wi(u);c--;)f[c]=arguments[c];var l=o<3&&f[0]!==a&&f[o-1]!==a?[]:N(f,a);
return o-=l.length,o<e?oi(t,r,Qu,u.placeholder,X,f,l,X,X,e-o):n(this&&this!==re&&this instanceof u?i:t,this,f)}var i=Gu(t);return u}function Ju(n){return function(t,r,e){var u=ll(t);if(!Hf(t)){var i=mi(r,3);t=Pc(t),r=function(n){return i(u[n],n,u)}}var o=n(t,r,e);return o>-1?u[i?t[o]:o]:X}}function Yu(n){return gi(function(t){var r=t.length,e=r,u=Y.prototype.thru;for(n&&t.reverse();e--;){var i=t[e];if("function"!=typeof i)throw new pl(en);if(u&&!o&&"wrapper"==bi(i))var o=new Y([],!0)}for(e=o?e:r;++e<r;){
i=t[e];var f=bi(i),c="wrapper"==f?Os(i):X;o=c&&$i(c[0])&&c[1]==(mn|yn|bn|xn)&&!c[4].length&&1==c[9]?o[bi(c[0])].apply(o,c[3]):1==i.length&&$i(i)?o[f]():o.thru(i)}return function(){var n=arguments,e=n[0];if(o&&1==n.length&&bh(e))return o.plant(e).value();for(var u=0,i=r?t[u].apply(this,n):e;++u<r;)i=t[u].call(this,i);return i}})}function Qu(n,t,r,e,u,i,o,f,c,a){function l(){for(var y=arguments.length,d=il(y),b=y;b--;)d[b]=arguments[b];if(_)var w=wi(l),m=C(d,w);if(e&&(d=Uu(d,e,u,_)),i&&(d=Bu(d,i,o,_)),
y-=m,_&&y<a){return oi(n,t,Qu,l.placeholder,r,d,N(d,w),f,c,a-y)}var x=h?r:this,j=p?x[n]:n;return y=d.length,f?d=Hi(d,f):v&&y>1&&d.reverse(),s&&c<y&&(d.length=c),this&&this!==re&&this instanceof l&&(j=g||Gu(j)),j.apply(x,d)}var s=t&mn,h=t&_n,p=t&vn,_=t&(yn|dn),v=t&jn,g=p?X:Gu(n);return l}function Xu(n,t){return function(r,e){return Oe(r,n,t(e),{})}}function ni(n,t){return function(r,e){var u;if(r===X&&e===X)return t;if(r!==X&&(u=r),e!==X){if(u===X)return e;"string"==typeof r||"string"==typeof e?(r=vu(r),
e=vu(e)):(r=_u(r),e=_u(e)),u=n(r,e)}return u}}function ti(t){return gi(function(r){return r=c(r,z(mi())),uu(function(e){var u=this;return t(r,function(t){return n(t,u,e)})})})}function ri(n,t){t=t===X?" ":vu(t);var r=t.length;if(r<2)return r?eu(t,n):t;var e=eu(t,Fl(n/V(t)));return T(t)?Ou(G(e),0,n).join(""):e.slice(0,n)}function ei(t,r,e,u){function i(){for(var r=-1,c=arguments.length,a=-1,l=u.length,s=il(l+c),h=this&&this!==re&&this instanceof i?f:t;++a<l;)s[a]=u[a];for(;c--;)s[a++]=arguments[++r];
return n(h,o?e:this,s)}var o=r&_n,f=Gu(t);return i}function ui(n){return function(t,r,e){return e&&"number"!=typeof e&&Ui(t,r,e)&&(r=e=X),t=Ac(t),r===X?(r=t,t=0):r=Ac(r),e=e===X?t<r?1:-1:Ac(e),ru(t,r,e,n)}}function ii(n){return function(t,r){return"string"==typeof t&&"string"==typeof r||(t=Ic(t),r=Ic(r)),n(t,r)}}function oi(n,t,r,e,u,i,o,f,c,a){var l=t&yn,s=l?o:X,h=l?X:o,p=l?i:X,_=l?X:i;t|=l?bn:wn,t&=~(l?wn:bn),t&gn||(t&=~(_n|vn));var v=[n,t,u,p,s,_,h,f,c,a],g=r.apply(X,v);return $i(n)&&Ss(g,v),g.placeholder=e,
Yi(g,n,t)}function fi(n){var t=al[n];return function(n,r){if(n=Ic(n),r=null==r?0:Hl(kc(r),292),r&&Zl(n)){var e=(Ec(n)+"e").split("e");return e=(Ec(t(e[0]+"e"+(+e[1]+r)))+"e").split("e"),+(e[0]+"e"+(+e[1]-r))}return t(n)}}function ci(n){return function(t){var r=zs(t);return r==Gn?M(t):r==tt?q(t):I(t,n(t))}}function ai(n,t,r,e,u,i,o,f){var c=t&vn;if(!c&&"function"!=typeof n)throw new pl(en);var a=e?e.length:0;if(a||(t&=~(bn|wn),e=u=X),o=o===X?o:Gl(kc(o),0),f=f===X?f:kc(f),a-=u?u.length:0,t&wn){var l=e,s=u;
e=u=X}var h=c?X:Os(n),p=[n,t,r,e,u,l,s,i,o,f];if(h&&qi(p,h),n=p[0],t=p[1],r=p[2],e=p[3],u=p[4],f=p[9]=p[9]===X?c?0:n.length:Gl(p[9]-a,0),!f&&t&(yn|dn)&&(t&=~(yn|dn)),t&&t!=_n)_=t==yn||t==dn?Hu(n,t,f):t!=bn&&t!=(_n|bn)||u.length?Qu.apply(X,p):ei(n,t,r,e);else var _=Zu(n,t,r);return Yi((h?ms:Ss)(_,p),n,t)}function li(n,t,r,e){return n===X||Gf(n,gl[r])&&!bl.call(e,r)?t:n}function si(n,t,r,e,u,i){return fc(n)&&fc(t)&&(i.set(t,n),Ke(n,t,X,si,i),i.delete(t)),n}function hi(n){return gc(n)?X:n}function pi(n,t,r,e,u,i){
var o=r&hn,f=n.length,c=t.length;if(f!=c&&!(o&&c>f))return!1;var a=i.get(n),l=i.get(t);if(a&&l)return a==t&&l==n;var s=-1,p=!0,_=r&pn?new yr:X;for(i.set(n,t),i.set(t,n);++s<f;){var v=n[s],g=t[s];if(e)var y=o?e(g,v,s,t,n,i):e(v,g,s,n,t,i);if(y!==X){if(y)continue;p=!1;break}if(_){if(!h(t,function(n,t){if(!S(_,t)&&(v===n||u(v,n,r,e,i)))return _.push(t)})){p=!1;break}}else if(v!==g&&!u(v,g,r,e,i)){p=!1;break}}return i.delete(n),i.delete(t),p}function _i(n,t,r,e,u,i,o){switch(r){case ct:if(n.byteLength!=t.byteLength||n.byteOffset!=t.byteOffset)return!1;
n=n.buffer,t=t.buffer;case ft:return!(n.byteLength!=t.byteLength||!i(new Rl(n),new Rl(t)));case Nn:case Pn:case Hn:return Gf(+n,+t);case Zn:return n.name==t.name&&n.message==t.message;case nt:case rt:return n==t+"";case Gn:var f=M;case tt:var c=e&hn;if(f||(f=P),n.size!=t.size&&!c)return!1;var a=o.get(n);if(a)return a==t;e|=pn,o.set(n,t);var l=pi(f(n),f(t),e,u,i,o);return o.delete(n),l;case et:if(_s)return _s.call(n)==_s.call(t)}return!1}function vi(n,t,r,e,u,i){var o=r&hn,f=yi(n),c=f.length;if(c!=yi(t).length&&!o)return!1;
for(var a=c;a--;){var l=f[a];if(!(o?l in t:bl.call(t,l)))return!1}var s=i.get(n),h=i.get(t);if(s&&h)return s==t&&h==n;var p=!0;i.set(n,t),i.set(t,n);for(var _=o;++a<c;){l=f[a];var v=n[l],g=t[l];if(e)var y=o?e(g,v,l,t,n,i):e(v,g,l,n,t,i);if(!(y===X?v===g||u(v,g,r,e,i):y)){p=!1;break}_||(_="constructor"==l)}if(p&&!_){var d=n.constructor,b=t.constructor;d!=b&&"constructor"in n&&"constructor"in t&&!("function"==typeof d&&d instanceof d&&"function"==typeof b&&b instanceof b)&&(p=!1)}return i.delete(n),
i.delete(t),p}function gi(n){return Ls(Vi(n,X,_o),n+"")}function yi(n){return de(n,Pc,Is)}function di(n){return de(n,qc,Rs)}function bi(n){for(var t=n.name+"",r=fs[t],e=bl.call(fs,t)?r.length:0;e--;){var u=r[e],i=u.func;if(null==i||i==n)return u.name}return t}function wi(n){return(bl.call(Z,"placeholder")?Z:n).placeholder}function mi(){var n=Z.iteratee||Ca;return n=n===Ca?De:n,arguments.length?n(arguments[0],arguments[1]):n}function xi(n,t){var r=n.__data__;return Ti(t)?r["string"==typeof t?"string":"hash"]:r.map;
}function ji(n){for(var t=Pc(n),r=t.length;r--;){var e=t[r],u=n[e];t[r]=[e,u,Fi(u)]}return t}function Ai(n,t){var r=B(n,t);return Ue(r)?r:X}function ki(n){var t=bl.call(n,Bl),r=n[Bl];try{n[Bl]=X;var e=!0}catch(n){}var u=xl.call(n);return e&&(t?n[Bl]=r:delete n[Bl]),u}function Oi(n,t,r){for(var e=-1,u=r.length;++e<u;){var i=r[e],o=i.size;switch(i.type){case"drop":n+=o;break;case"dropRight":t-=o;break;case"take":t=Hl(t,n+o);break;case"takeRight":n=Gl(n,t-o)}}return{start:n,end:t}}function Ii(n){var t=n.match(Bt);
return t?t[1].split(Tt):[]}function Ri(n,t,r){t=ku(t,n);for(var e=-1,u=t.length,i=!1;++e<u;){var o=no(t[e]);if(!(i=null!=n&&r(n,o)))break;n=n[o]}return i||++e!=u?i:(u=null==n?0:n.length,!!u&&oc(u)&&Ci(o,u)&&(bh(n)||dh(n)))}function zi(n){var t=n.length,r=new n.constructor(t);return t&&"string"==typeof n[0]&&bl.call(n,"index")&&(r.index=n.index,r.input=n.input),r}function Ei(n){return"function"!=typeof n.constructor||Mi(n)?{}:gs(El(n))}function Si(n,t,r){var e=n.constructor;switch(t){case ft:return Ru(n);
case Nn:case Pn:return new e(+n);case ct:return zu(n,r);case at:case lt:case st:case ht:case pt:case _t:case vt:case gt:case yt:return Wu(n,r);case Gn:return new e;case Hn:case rt:return new e(n);case nt:return Eu(n);case tt:return new e;case et:return Su(n)}}function Wi(n,t){var r=t.length;if(!r)return n;var e=r-1;return t[e]=(r>1?"& ":"")+t[e],t=t.join(r>2?", ":" "),n.replace(Ut,"{\n/* [wrapped with "+t+"] */\n")}function Li(n){return bh(n)||dh(n)||!!(Cl&&n&&n[Cl])}function Ci(n,t){var r=typeof n;
return t=null==t?Wn:t,!!t&&("number"==r||"symbol"!=r&&Vt.test(n))&&n>-1&&n%1==0&&n<t}function Ui(n,t,r){if(!fc(r))return!1;var e=typeof t;return!!("number"==e?Hf(r)&&Ci(t,r.length):"string"==e&&t in r)&&Gf(r[t],n)}function Bi(n,t){if(bh(n))return!1;var r=typeof n;return!("number"!=r&&"symbol"!=r&&"boolean"!=r&&null!=n&&!bc(n))||(zt.test(n)||!Rt.test(n)||null!=t&&n in ll(t))}function Ti(n){var t=typeof n;return"string"==t||"number"==t||"symbol"==t||"boolean"==t?"__proto__"!==n:null===n}function $i(n){
var t=bi(n),r=Z[t];if("function"!=typeof r||!(t in Ct.prototype))return!1;if(n===r)return!0;var e=Os(r);return!!e&&n===e[0]}function Di(n){return!!ml&&ml in n}function Mi(n){var t=n&&n.constructor;return n===("function"==typeof t&&t.prototype||gl)}function Fi(n){return n===n&&!fc(n)}function Ni(n,t){return function(r){return null!=r&&(r[n]===t&&(t!==X||n in ll(r)))}}function Pi(n){var t=Cf(n,function(n){return r.size===fn&&r.clear(),n}),r=t.cache;return t}function qi(n,t){var r=n[1],e=t[1],u=r|e,i=u<(_n|vn|mn),o=e==mn&&r==yn||e==mn&&r==xn&&n[7].length<=t[8]||e==(mn|xn)&&t[7].length<=t[8]&&r==yn;
if(!i&&!o)return n;e&_n&&(n[2]=t[2],u|=r&_n?0:gn);var f=t[3];if(f){var c=n[3];n[3]=c?Uu(c,f,t[4]):f,n[4]=c?N(n[3],cn):t[4]}return f=t[5],f&&(c=n[5],n[5]=c?Bu(c,f,t[6]):f,n[6]=c?N(n[5],cn):t[6]),f=t[7],f&&(n[7]=f),e&mn&&(n[8]=null==n[8]?t[8]:Hl(n[8],t[8])),null==n[9]&&(n[9]=t[9]),n[0]=t[0],n[1]=u,n}function Zi(n){var t=[];if(null!=n)for(var r in ll(n))t.push(r);return t}function Ki(n){return xl.call(n)}function Vi(t,r,e){return r=Gl(r===X?t.length-1:r,0),function(){for(var u=arguments,i=-1,o=Gl(u.length-r,0),f=il(o);++i<o;)f[i]=u[r+i];
i=-1;for(var c=il(r+1);++i<r;)c[i]=u[i];return c[r]=e(f),n(t,this,c)}}function Gi(n,t){return t.length<2?n:_e(n,au(t,0,-1))}function Hi(n,t){for(var r=n.length,e=Hl(t.length,r),u=Tu(n);e--;){var i=t[e];n[e]=Ci(i,r)?u[i]:X}return n}function Ji(n,t){if(("constructor"!==t||"function"!=typeof n[t])&&"__proto__"!=t)return n[t]}function Yi(n,t,r){var e=t+"";return Ls(n,Wi(e,ro(Ii(e),r)))}function Qi(n){var t=0,r=0;return function(){var e=Jl(),u=In-(e-r);if(r=e,u>0){if(++t>=On)return arguments[0]}else t=0;
return n.apply(X,arguments)}}function Xi(n,t){var r=-1,e=n.length,u=e-1;for(t=t===X?e:t;++r<t;){var i=tu(r,u),o=n[i];n[i]=n[r],n[r]=o}return n.length=t,n}function no(n){if("string"==typeof n||bc(n))return n;var t=n+"";return"0"==t&&1/n==-Sn?"-0":t}function to(n){if(null!=n){try{return dl.call(n)}catch(n){}try{return n+""}catch(n){}}return""}function ro(n,t){return r($n,function(r){var e="_."+r[0];t&r[1]&&!o(n,e)&&n.push(e)}),n.sort()}function eo(n){if(n instanceof Ct)return n.clone();var t=new Y(n.__wrapped__,n.__chain__);
return t.__actions__=Tu(n.__actions__),t.__index__=n.__index__,t.__values__=n.__values__,t}function uo(n,t,r){t=(r?Ui(n,t,r):t===X)?1:Gl(kc(t),0);var e=null==n?0:n.length;if(!e||t<1)return[];for(var u=0,i=0,o=il(Fl(e/t));u<e;)o[i++]=au(n,u,u+=t);return o}function io(n){for(var t=-1,r=null==n?0:n.length,e=0,u=[];++t<r;){var i=n[t];i&&(u[e++]=i)}return u}function oo(){var n=arguments.length;if(!n)return[];for(var t=il(n-1),r=arguments[0],e=n;e--;)t[e-1]=arguments[e];return a(bh(r)?Tu(r):[r],ee(t,1));
}function fo(n,t,r){var e=null==n?0:n.length;return e?(t=r||t===X?1:kc(t),au(n,t<0?0:t,e)):[]}function co(n,t,r){var e=null==n?0:n.length;return e?(t=r||t===X?1:kc(t),t=e-t,au(n,0,t<0?0:t)):[]}function ao(n,t){return n&&n.length?bu(n,mi(t,3),!0,!0):[]}function lo(n,t){return n&&n.length?bu(n,mi(t,3),!0):[]}function so(n,t,r,e){var u=null==n?0:n.length;return u?(r&&"number"!=typeof r&&Ui(n,t,r)&&(r=0,e=u),ne(n,t,r,e)):[]}function ho(n,t,r){var e=null==n?0:n.length;if(!e)return-1;var u=null==r?0:kc(r);
return u<0&&(u=Gl(e+u,0)),g(n,mi(t,3),u)}function po(n,t,r){var e=null==n?0:n.length;if(!e)return-1;var u=e-1;return r!==X&&(u=kc(r),u=r<0?Gl(e+u,0):Hl(u,e-1)),g(n,mi(t,3),u,!0)}function _o(n){return(null==n?0:n.length)?ee(n,1):[]}function vo(n){return(null==n?0:n.length)?ee(n,Sn):[]}function go(n,t){return(null==n?0:n.length)?(t=t===X?1:kc(t),ee(n,t)):[]}function yo(n){for(var t=-1,r=null==n?0:n.length,e={};++t<r;){var u=n[t];e[u[0]]=u[1]}return e}function bo(n){return n&&n.length?n[0]:X}function wo(n,t,r){
var e=null==n?0:n.length;if(!e)return-1;var u=null==r?0:kc(r);return u<0&&(u=Gl(e+u,0)),y(n,t,u)}function mo(n){return(null==n?0:n.length)?au(n,0,-1):[]}function xo(n,t){return null==n?"":Kl.call(n,t)}function jo(n){var t=null==n?0:n.length;return t?n[t-1]:X}function Ao(n,t,r){var e=null==n?0:n.length;if(!e)return-1;var u=e;return r!==X&&(u=kc(r),u=u<0?Gl(e+u,0):Hl(u,e-1)),t===t?K(n,t,u):g(n,b,u,!0)}function ko(n,t){return n&&n.length?Ge(n,kc(t)):X}function Oo(n,t){return n&&n.length&&t&&t.length?Xe(n,t):n;
}function Io(n,t,r){return n&&n.length&&t&&t.length?Xe(n,t,mi(r,2)):n}function Ro(n,t,r){return n&&n.length&&t&&t.length?Xe(n,t,X,r):n}function zo(n,t){var r=[];if(!n||!n.length)return r;var e=-1,u=[],i=n.length;for(t=mi(t,3);++e<i;){var o=n[e];t(o,e,n)&&(r.push(o),u.push(e))}return nu(n,u),r}function Eo(n){return null==n?n:Xl.call(n)}function So(n,t,r){var e=null==n?0:n.length;return e?(r&&"number"!=typeof r&&Ui(n,t,r)?(t=0,r=e):(t=null==t?0:kc(t),r=r===X?e:kc(r)),au(n,t,r)):[]}function Wo(n,t){
return su(n,t)}function Lo(n,t,r){return hu(n,t,mi(r,2))}function Co(n,t){var r=null==n?0:n.length;if(r){var e=su(n,t);if(e<r&&Gf(n[e],t))return e}return-1}function Uo(n,t){return su(n,t,!0)}function Bo(n,t,r){return hu(n,t,mi(r,2),!0)}function To(n,t){if(null==n?0:n.length){var r=su(n,t,!0)-1;if(Gf(n[r],t))return r}return-1}function $o(n){return n&&n.length?pu(n):[]}function Do(n,t){return n&&n.length?pu(n,mi(t,2)):[]}function Mo(n){var t=null==n?0:n.length;return t?au(n,1,t):[]}function Fo(n,t,r){
return n&&n.length?(t=r||t===X?1:kc(t),au(n,0,t<0?0:t)):[]}function No(n,t,r){var e=null==n?0:n.length;return e?(t=r||t===X?1:kc(t),t=e-t,au(n,t<0?0:t,e)):[]}function Po(n,t){return n&&n.length?bu(n,mi(t,3),!1,!0):[]}function qo(n,t){return n&&n.length?bu(n,mi(t,3)):[]}function Zo(n){return n&&n.length?gu(n):[]}function Ko(n,t){return n&&n.length?gu(n,mi(t,2)):[]}function Vo(n,t){return t="function"==typeof t?t:X,n&&n.length?gu(n,X,t):[]}function Go(n){if(!n||!n.length)return[];var t=0;return n=i(n,function(n){
if(Jf(n))return t=Gl(n.length,t),!0}),O(t,function(t){return c(n,m(t))})}function Ho(t,r){if(!t||!t.length)return[];var e=Go(t);return null==r?e:c(e,function(t){return n(r,X,t)})}function Jo(n,t){return xu(n||[],t||[],Sr)}function Yo(n,t){return xu(n||[],t||[],fu)}function Qo(n){var t=Z(n);return t.__chain__=!0,t}function Xo(n,t){return t(n),n}function nf(n,t){return t(n)}function tf(){return Qo(this)}function rf(){return new Y(this.value(),this.__chain__)}function ef(){this.__values__===X&&(this.__values__=jc(this.value()));
var n=this.__index__>=this.__values__.length;return{done:n,value:n?X:this.__values__[this.__index__++]}}function uf(){return this}function of(n){for(var t,r=this;r instanceof J;){var e=eo(r);e.__index__=0,e.__values__=X,t?u.__wrapped__=e:t=e;var u=e;r=r.__wrapped__}return u.__wrapped__=n,t}function ff(){var n=this.__wrapped__;if(n instanceof Ct){var t=n;return this.__actions__.length&&(t=new Ct(this)),t=t.reverse(),t.__actions__.push({func:nf,args:[Eo],thisArg:X}),new Y(t,this.__chain__)}return this.thru(Eo);
}function cf(){return wu(this.__wrapped__,this.__actions__)}function af(n,t,r){var e=bh(n)?u:Jr;return r&&Ui(n,t,r)&&(t=X),e(n,mi(t,3))}function lf(n,t){return(bh(n)?i:te)(n,mi(t,3))}function sf(n,t){return ee(yf(n,t),1)}function hf(n,t){return ee(yf(n,t),Sn)}function pf(n,t,r){return r=r===X?1:kc(r),ee(yf(n,t),r)}function _f(n,t){return(bh(n)?r:ys)(n,mi(t,3))}function vf(n,t){return(bh(n)?e:ds)(n,mi(t,3))}function gf(n,t,r,e){n=Hf(n)?n:ra(n),r=r&&!e?kc(r):0;var u=n.length;return r<0&&(r=Gl(u+r,0)),
dc(n)?r<=u&&n.indexOf(t,r)>-1:!!u&&y(n,t,r)>-1}function yf(n,t){return(bh(n)?c:Pe)(n,mi(t,3))}function df(n,t,r,e){return null==n?[]:(bh(t)||(t=null==t?[]:[t]),r=e?X:r,bh(r)||(r=null==r?[]:[r]),He(n,t,r))}function bf(n,t,r){var e=bh(n)?l:j,u=arguments.length<3;return e(n,mi(t,4),r,u,ys)}function wf(n,t,r){var e=bh(n)?s:j,u=arguments.length<3;return e(n,mi(t,4),r,u,ds)}function mf(n,t){return(bh(n)?i:te)(n,Uf(mi(t,3)))}function xf(n){return(bh(n)?Ir:iu)(n)}function jf(n,t,r){return t=(r?Ui(n,t,r):t===X)?1:kc(t),
(bh(n)?Rr:ou)(n,t)}function Af(n){return(bh(n)?zr:cu)(n)}function kf(n){if(null==n)return 0;if(Hf(n))return dc(n)?V(n):n.length;var t=zs(n);return t==Gn||t==tt?n.size:Me(n).length}function Of(n,t,r){var e=bh(n)?h:lu;return r&&Ui(n,t,r)&&(t=X),e(n,mi(t,3))}function If(n,t){if("function"!=typeof t)throw new pl(en);return n=kc(n),function(){if(--n<1)return t.apply(this,arguments)}}function Rf(n,t,r){return t=r?X:t,t=n&&null==t?n.length:t,ai(n,mn,X,X,X,X,t)}function zf(n,t){var r;if("function"!=typeof t)throw new pl(en);
return n=kc(n),function(){return--n>0&&(r=t.apply(this,arguments)),n<=1&&(t=X),r}}function Ef(n,t,r){t=r?X:t;var e=ai(n,yn,X,X,X,X,X,t);return e.placeholder=Ef.placeholder,e}function Sf(n,t,r){t=r?X:t;var e=ai(n,dn,X,X,X,X,X,t);return e.placeholder=Sf.placeholder,e}function Wf(n,t,r){function e(t){var r=h,e=p;return h=p=X,d=t,v=n.apply(e,r)}function u(n){return d=n,g=Ws(f,t),b?e(n):v}function i(n){var r=n-y,e=n-d,u=t-r;return w?Hl(u,_-e):u}function o(n){var r=n-y,e=n-d;return y===X||r>=t||r<0||w&&e>=_;
}function f(){var n=fh();return o(n)?c(n):(g=Ws(f,i(n)),X)}function c(n){return g=X,m&&h?e(n):(h=p=X,v)}function a(){g!==X&&As(g),d=0,h=y=p=g=X}function l(){return g===X?v:c(fh())}function s(){var n=fh(),r=o(n);if(h=arguments,p=this,y=n,r){if(g===X)return u(y);if(w)return As(g),g=Ws(f,t),e(y)}return g===X&&(g=Ws(f,t)),v}var h,p,_,v,g,y,d=0,b=!1,w=!1,m=!0;if("function"!=typeof n)throw new pl(en);return t=Ic(t)||0,fc(r)&&(b=!!r.leading,w="maxWait"in r,_=w?Gl(Ic(r.maxWait)||0,t):_,m="trailing"in r?!!r.trailing:m),
s.cancel=a,s.flush=l,s}function Lf(n){return ai(n,jn)}function Cf(n,t){if("function"!=typeof n||null!=t&&"function"!=typeof t)throw new pl(en);var r=function(){var e=arguments,u=t?t.apply(this,e):e[0],i=r.cache;if(i.has(u))return i.get(u);var o=n.apply(this,e);return r.cache=i.set(u,o)||i,o};return r.cache=new(Cf.Cache||sr),r}function Uf(n){if("function"!=typeof n)throw new pl(en);return function(){var t=arguments;switch(t.length){case 0:return!n.call(this);case 1:return!n.call(this,t[0]);case 2:
return!n.call(this,t[0],t[1]);case 3:return!n.call(this,t[0],t[1],t[2])}return!n.apply(this,t)}}function Bf(n){return zf(2,n)}function Tf(n,t){if("function"!=typeof n)throw new pl(en);return t=t===X?t:kc(t),uu(n,t)}function $f(t,r){if("function"!=typeof t)throw new pl(en);return r=null==r?0:Gl(kc(r),0),uu(function(e){var u=e[r],i=Ou(e,0,r);return u&&a(i,u),n(t,this,i)})}function Df(n,t,r){var e=!0,u=!0;if("function"!=typeof n)throw new pl(en);return fc(r)&&(e="leading"in r?!!r.leading:e,u="trailing"in r?!!r.trailing:u),
Wf(n,t,{leading:e,maxWait:t,trailing:u})}function Mf(n){return Rf(n,1)}function Ff(n,t){return ph(Au(t),n)}function Nf(){if(!arguments.length)return[];var n=arguments[0];return bh(n)?n:[n]}function Pf(n){return Fr(n,sn)}function qf(n,t){return t="function"==typeof t?t:X,Fr(n,sn,t)}function Zf(n){return Fr(n,an|sn)}function Kf(n,t){return t="function"==typeof t?t:X,Fr(n,an|sn,t)}function Vf(n,t){return null==t||Pr(n,t,Pc(t))}function Gf(n,t){return n===t||n!==n&&t!==t}function Hf(n){return null!=n&&oc(n.length)&&!uc(n);
}function Jf(n){return cc(n)&&Hf(n)}function Yf(n){return n===!0||n===!1||cc(n)&&we(n)==Nn}function Qf(n){return cc(n)&&1===n.nodeType&&!gc(n)}function Xf(n){if(null==n)return!0;if(Hf(n)&&(bh(n)||"string"==typeof n||"function"==typeof n.splice||mh(n)||Oh(n)||dh(n)))return!n.length;var t=zs(n);if(t==Gn||t==tt)return!n.size;if(Mi(n))return!Me(n).length;for(var r in n)if(bl.call(n,r))return!1;return!0}function nc(n,t){return Se(n,t)}function tc(n,t,r){r="function"==typeof r?r:X;var e=r?r(n,t):X;return e===X?Se(n,t,X,r):!!e;
}function rc(n){if(!cc(n))return!1;var t=we(n);return t==Zn||t==qn||"string"==typeof n.message&&"string"==typeof n.name&&!gc(n)}function ec(n){return"number"==typeof n&&Zl(n)}function uc(n){if(!fc(n))return!1;var t=we(n);return t==Kn||t==Vn||t==Fn||t==Xn}function ic(n){return"number"==typeof n&&n==kc(n)}function oc(n){return"number"==typeof n&&n>-1&&n%1==0&&n<=Wn}function fc(n){var t=typeof n;return null!=n&&("object"==t||"function"==t)}function cc(n){return null!=n&&"object"==typeof n}function ac(n,t){
return n===t||Ce(n,t,ji(t))}function lc(n,t,r){return r="function"==typeof r?r:X,Ce(n,t,ji(t),r)}function sc(n){return vc(n)&&n!=+n}function hc(n){if(Es(n))throw new fl(rn);return Ue(n)}function pc(n){return null===n}function _c(n){return null==n}function vc(n){return"number"==typeof n||cc(n)&&we(n)==Hn}function gc(n){if(!cc(n)||we(n)!=Yn)return!1;var t=El(n);if(null===t)return!0;var r=bl.call(t,"constructor")&&t.constructor;return"function"==typeof r&&r instanceof r&&dl.call(r)==jl}function yc(n){
return ic(n)&&n>=-Wn&&n<=Wn}function dc(n){return"string"==typeof n||!bh(n)&&cc(n)&&we(n)==rt}function bc(n){return"symbol"==typeof n||cc(n)&&we(n)==et}function wc(n){return n===X}function mc(n){return cc(n)&&zs(n)==it}function xc(n){return cc(n)&&we(n)==ot}function jc(n){if(!n)return[];if(Hf(n))return dc(n)?G(n):Tu(n);if(Ul&&n[Ul])return D(n[Ul]());var t=zs(n);return(t==Gn?M:t==tt?P:ra)(n)}function Ac(n){if(!n)return 0===n?n:0;if(n=Ic(n),n===Sn||n===-Sn){return(n<0?-1:1)*Ln}return n===n?n:0}function kc(n){
var t=Ac(n),r=t%1;return t===t?r?t-r:t:0}function Oc(n){return n?Mr(kc(n),0,Un):0}function Ic(n){if("number"==typeof n)return n;if(bc(n))return Cn;if(fc(n)){var t="function"==typeof n.valueOf?n.valueOf():n;n=fc(t)?t+"":t}if("string"!=typeof n)return 0===n?n:+n;n=R(n);var r=qt.test(n);return r||Kt.test(n)?Xr(n.slice(2),r?2:8):Pt.test(n)?Cn:+n}function Rc(n){return $u(n,qc(n))}function zc(n){return n?Mr(kc(n),-Wn,Wn):0===n?n:0}function Ec(n){return null==n?"":vu(n)}function Sc(n,t){var r=gs(n);return null==t?r:Cr(r,t);
}function Wc(n,t){return v(n,mi(t,3),ue)}function Lc(n,t){return v(n,mi(t,3),oe)}function Cc(n,t){return null==n?n:bs(n,mi(t,3),qc)}function Uc(n,t){return null==n?n:ws(n,mi(t,3),qc)}function Bc(n,t){return n&&ue(n,mi(t,3))}function Tc(n,t){return n&&oe(n,mi(t,3))}function $c(n){return null==n?[]:fe(n,Pc(n))}function Dc(n){return null==n?[]:fe(n,qc(n))}function Mc(n,t,r){var e=null==n?X:_e(n,t);return e===X?r:e}function Fc(n,t){return null!=n&&Ri(n,t,xe)}function Nc(n,t){return null!=n&&Ri(n,t,je);
}function Pc(n){return Hf(n)?Or(n):Me(n)}function qc(n){return Hf(n)?Or(n,!0):Fe(n)}function Zc(n,t){var r={};return t=mi(t,3),ue(n,function(n,e,u){Br(r,t(n,e,u),n)}),r}function Kc(n,t){var r={};return t=mi(t,3),ue(n,function(n,e,u){Br(r,e,t(n,e,u))}),r}function Vc(n,t){return Gc(n,Uf(mi(t)))}function Gc(n,t){if(null==n)return{};var r=c(di(n),function(n){return[n]});return t=mi(t),Ye(n,r,function(n,r){return t(n,r[0])})}function Hc(n,t,r){t=ku(t,n);var e=-1,u=t.length;for(u||(u=1,n=X);++e<u;){var i=null==n?X:n[no(t[e])];
i===X&&(e=u,i=r),n=uc(i)?i.call(n):i}return n}function Jc(n,t,r){return null==n?n:fu(n,t,r)}function Yc(n,t,r,e){return e="function"==typeof e?e:X,null==n?n:fu(n,t,r,e)}function Qc(n,t,e){var u=bh(n),i=u||mh(n)||Oh(n);if(t=mi(t,4),null==e){var o=n&&n.constructor;e=i?u?new o:[]:fc(n)&&uc(o)?gs(El(n)):{}}return(i?r:ue)(n,function(n,r,u){return t(e,n,r,u)}),e}function Xc(n,t){return null==n||yu(n,t)}function na(n,t,r){return null==n?n:du(n,t,Au(r))}function ta(n,t,r,e){return e="function"==typeof e?e:X,
null==n?n:du(n,t,Au(r),e)}function ra(n){return null==n?[]:E(n,Pc(n))}function ea(n){return null==n?[]:E(n,qc(n))}function ua(n,t,r){return r===X&&(r=t,t=X),r!==X&&(r=Ic(r),r=r===r?r:0),t!==X&&(t=Ic(t),t=t===t?t:0),Mr(Ic(n),t,r)}function ia(n,t,r){return t=Ac(t),r===X?(r=t,t=0):r=Ac(r),n=Ic(n),Ae(n,t,r)}function oa(n,t,r){if(r&&"boolean"!=typeof r&&Ui(n,t,r)&&(t=r=X),r===X&&("boolean"==typeof t?(r=t,t=X):"boolean"==typeof n&&(r=n,n=X)),n===X&&t===X?(n=0,t=1):(n=Ac(n),t===X?(t=n,n=0):t=Ac(t)),n>t){
var e=n;n=t,t=e}if(r||n%1||t%1){var u=Ql();return Hl(n+u*(t-n+Qr("1e-"+((u+"").length-1))),t)}return tu(n,t)}function fa(n){return Qh(Ec(n).toLowerCase())}function ca(n){return n=Ec(n),n&&n.replace(Gt,ve).replace(Dr,"")}function aa(n,t,r){n=Ec(n),t=vu(t);var e=n.length;r=r===X?e:Mr(kc(r),0,e);var u=r;return r-=t.length,r>=0&&n.slice(r,u)==t}function la(n){return n=Ec(n),n&&At.test(n)?n.replace(xt,ge):n}function sa(n){return n=Ec(n),n&&Wt.test(n)?n.replace(St,"\\$&"):n}function ha(n,t,r){n=Ec(n),t=kc(t);
var e=t?V(n):0;if(!t||e>=t)return n;var u=(t-e)/2;return ri(Nl(u),r)+n+ri(Fl(u),r)}function pa(n,t,r){n=Ec(n),t=kc(t);var e=t?V(n):0;return t&&e<t?n+ri(t-e,r):n}function _a(n,t,r){n=Ec(n),t=kc(t);var e=t?V(n):0;return t&&e<t?ri(t-e,r)+n:n}function va(n,t,r){return r||null==t?t=0:t&&(t=+t),Yl(Ec(n).replace(Lt,""),t||0)}function ga(n,t,r){return t=(r?Ui(n,t,r):t===X)?1:kc(t),eu(Ec(n),t)}function ya(){var n=arguments,t=Ec(n[0]);return n.length<3?t:t.replace(n[1],n[2])}function da(n,t,r){return r&&"number"!=typeof r&&Ui(n,t,r)&&(t=r=X),
(r=r===X?Un:r>>>0)?(n=Ec(n),n&&("string"==typeof t||null!=t&&!Ah(t))&&(t=vu(t),!t&&T(n))?Ou(G(n),0,r):n.split(t,r)):[]}function ba(n,t,r){return n=Ec(n),r=null==r?0:Mr(kc(r),0,n.length),t=vu(t),n.slice(r,r+t.length)==t}function wa(n,t,r){var e=Z.templateSettings;r&&Ui(n,t,r)&&(t=X),n=Ec(n),t=Sh({},t,e,li);var u,i,o=Sh({},t.imports,e.imports,li),f=Pc(o),c=E(o,f),a=0,l=t.interpolate||Ht,s="__p += '",h=sl((t.escape||Ht).source+"|"+l.source+"|"+(l===It?Ft:Ht).source+"|"+(t.evaluate||Ht).source+"|$","g"),p="//# sourceURL="+(bl.call(t,"sourceURL")?(t.sourceURL+"").replace(/\s/g," "):"lodash.templateSources["+ ++Zr+"]")+"\n";
n.replace(h,function(t,r,e,o,f,c){return e||(e=o),s+=n.slice(a,c).replace(Jt,U),r&&(u=!0,s+="' +\n__e("+r+") +\n'"),f&&(i=!0,s+="';\n"+f+";\n__p += '"),e&&(s+="' +\n((__t = ("+e+")) == null ? '' : __t) +\n'"),a=c+t.length,t}),s+="';\n";var _=bl.call(t,"variable")&&t.variable;if(_){if(Dt.test(_))throw new fl(un)}else s="with (obj) {\n"+s+"\n}\n";s=(i?s.replace(dt,""):s).replace(bt,"$1").replace(wt,"$1;"),s="function("+(_||"obj")+") {\n"+(_?"":"obj || (obj = {});\n")+"var __t, __p = ''"+(u?", __e = _.escape":"")+(i?", __j = Array.prototype.join;\nfunction print() { __p += __j.call(arguments, '') }\n":";\n")+s+"return __p\n}";
var v=Xh(function(){return cl(f,p+"return "+s).apply(X,c)});if(v.source=s,rc(v))throw v;return v}function ma(n){return Ec(n).toLowerCase()}function xa(n){return Ec(n).toUpperCase()}function ja(n,t,r){if(n=Ec(n),n&&(r||t===X))return R(n);if(!n||!(t=vu(t)))return n;var e=G(n),u=G(t);return Ou(e,W(e,u),L(e,u)+1).join("")}function Aa(n,t,r){if(n=Ec(n),n&&(r||t===X))return n.slice(0,H(n)+1);if(!n||!(t=vu(t)))return n;var e=G(n);return Ou(e,0,L(e,G(t))+1).join("")}function ka(n,t,r){if(n=Ec(n),n&&(r||t===X))return n.replace(Lt,"");
if(!n||!(t=vu(t)))return n;var e=G(n);return Ou(e,W(e,G(t))).join("")}function Oa(n,t){var r=An,e=kn;if(fc(t)){var u="separator"in t?t.separator:u;r="length"in t?kc(t.length):r,e="omission"in t?vu(t.omission):e}n=Ec(n);var i=n.length;if(T(n)){var o=G(n);i=o.length}if(r>=i)return n;var f=r-V(e);if(f<1)return e;var c=o?Ou(o,0,f).join(""):n.slice(0,f);if(u===X)return c+e;if(o&&(f+=c.length-f),Ah(u)){if(n.slice(f).search(u)){var a,l=c;for(u.global||(u=sl(u.source,Ec(Nt.exec(u))+"g")),u.lastIndex=0;a=u.exec(l);)var s=a.index;
c=c.slice(0,s===X?f:s)}}else if(n.indexOf(vu(u),f)!=f){var h=c.lastIndexOf(u);h>-1&&(c=c.slice(0,h))}return c+e}function Ia(n){return n=Ec(n),n&&jt.test(n)?n.replace(mt,ye):n}function Ra(n,t,r){return n=Ec(n),t=r?X:t,t===X?$(n)?Q(n):_(n):n.match(t)||[]}function za(t){var r=null==t?0:t.length,e=mi();return t=r?c(t,function(n){if("function"!=typeof n[1])throw new pl(en);return[e(n[0]),n[1]]}):[],uu(function(e){for(var u=-1;++u<r;){var i=t[u];if(n(i[0],this,e))return n(i[1],this,e)}})}function Ea(n){
return Nr(Fr(n,an))}function Sa(n){return function(){return n}}function Wa(n,t){return null==n||n!==n?t:n}function La(n){return n}function Ca(n){return De("function"==typeof n?n:Fr(n,an))}function Ua(n){return qe(Fr(n,an))}function Ba(n,t){return Ze(n,Fr(t,an))}function Ta(n,t,e){var u=Pc(t),i=fe(t,u);null!=e||fc(t)&&(i.length||!u.length)||(e=t,t=n,n=this,i=fe(t,Pc(t)));var o=!(fc(e)&&"chain"in e&&!e.chain),f=uc(n);return r(i,function(r){var e=t[r];n[r]=e,f&&(n.prototype[r]=function(){var t=this.__chain__;
if(o||t){var r=n(this.__wrapped__);return(r.__actions__=Tu(this.__actions__)).push({func:e,args:arguments,thisArg:n}),r.__chain__=t,r}return e.apply(n,a([this.value()],arguments))})}),n}function $a(){return re._===this&&(re._=Al),this}function Da(){}function Ma(n){return n=kc(n),uu(function(t){return Ge(t,n)})}function Fa(n){return Bi(n)?m(no(n)):Qe(n)}function Na(n){return function(t){return null==n?X:_e(n,t)}}function Pa(){return[]}function qa(){return!1}function Za(){return{}}function Ka(){return"";
}function Va(){return!0}function Ga(n,t){if(n=kc(n),n<1||n>Wn)return[];var r=Un,e=Hl(n,Un);t=mi(t),n-=Un;for(var u=O(e,t);++r<n;)t(r);return u}function Ha(n){return bh(n)?c(n,no):bc(n)?[n]:Tu(Cs(Ec(n)))}function Ja(n){var t=++wl;return Ec(n)+t}function Ya(n){return n&&n.length?Yr(n,La,me):X}function Qa(n,t){return n&&n.length?Yr(n,mi(t,2),me):X}function Xa(n){return w(n,La)}function nl(n,t){return w(n,mi(t,2))}function tl(n){return n&&n.length?Yr(n,La,Ne):X}function rl(n,t){return n&&n.length?Yr(n,mi(t,2),Ne):X;
}function el(n){return n&&n.length?k(n,La):0}function ul(n,t){return n&&n.length?k(n,mi(t,2)):0}x=null==x?re:be.defaults(re.Object(),x,be.pick(re,qr));var il=x.Array,ol=x.Date,fl=x.Error,cl=x.Function,al=x.Math,ll=x.Object,sl=x.RegExp,hl=x.String,pl=x.TypeError,_l=il.prototype,vl=cl.prototype,gl=ll.prototype,yl=x["__core-js_shared__"],dl=vl.toString,bl=gl.hasOwnProperty,wl=0,ml=function(){var n=/[^.]+$/.exec(yl&&yl.keys&&yl.keys.IE_PROTO||"");return n?"Symbol(src)_1."+n:""}(),xl=gl.toString,jl=dl.call(ll),Al=re._,kl=sl("^"+dl.call(bl).replace(St,"\\$&").replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g,"$1.*?")+"$"),Ol=ie?x.Buffer:X,Il=x.Symbol,Rl=x.Uint8Array,zl=Ol?Ol.allocUnsafe:X,El=F(ll.getPrototypeOf,ll),Sl=ll.create,Wl=gl.propertyIsEnumerable,Ll=_l.splice,Cl=Il?Il.isConcatSpreadable:X,Ul=Il?Il.iterator:X,Bl=Il?Il.toStringTag:X,Tl=function(){
try{var n=Ai(ll,"defineProperty");return n({},"",{}),n}catch(n){}}(),$l=x.clearTimeout!==re.clearTimeout&&x.clearTimeout,Dl=ol&&ol.now!==re.Date.now&&ol.now,Ml=x.setTimeout!==re.setTimeout&&x.setTimeout,Fl=al.ceil,Nl=al.floor,Pl=ll.getOwnPropertySymbols,ql=Ol?Ol.isBuffer:X,Zl=x.isFinite,Kl=_l.join,Vl=F(ll.keys,ll),Gl=al.max,Hl=al.min,Jl=ol.now,Yl=x.parseInt,Ql=al.random,Xl=_l.reverse,ns=Ai(x,"DataView"),ts=Ai(x,"Map"),rs=Ai(x,"Promise"),es=Ai(x,"Set"),us=Ai(x,"WeakMap"),is=Ai(ll,"create"),os=us&&new us,fs={},cs=to(ns),as=to(ts),ls=to(rs),ss=to(es),hs=to(us),ps=Il?Il.prototype:X,_s=ps?ps.valueOf:X,vs=ps?ps.toString:X,gs=function(){
function n(){}return function(t){if(!fc(t))return{};if(Sl)return Sl(t);n.prototype=t;var r=new n;return n.prototype=X,r}}();Z.templateSettings={escape:kt,evaluate:Ot,interpolate:It,variable:"",imports:{_:Z}},Z.prototype=J.prototype,Z.prototype.constructor=Z,Y.prototype=gs(J.prototype),Y.prototype.constructor=Y,Ct.prototype=gs(J.prototype),Ct.prototype.constructor=Ct,Xt.prototype.clear=nr,Xt.prototype.delete=tr,Xt.prototype.get=rr,Xt.prototype.has=er,Xt.prototype.set=ur,ir.prototype.clear=or,ir.prototype.delete=fr,
ir.prototype.get=cr,ir.prototype.has=ar,ir.prototype.set=lr,sr.prototype.clear=hr,sr.prototype.delete=pr,sr.prototype.get=_r,sr.prototype.has=vr,sr.prototype.set=gr,yr.prototype.add=yr.prototype.push=dr,yr.prototype.has=br,wr.prototype.clear=mr,wr.prototype.delete=xr,wr.prototype.get=jr,wr.prototype.has=Ar,wr.prototype.set=kr;var ys=Pu(ue),ds=Pu(oe,!0),bs=qu(),ws=qu(!0),ms=os?function(n,t){return os.set(n,t),n}:La,xs=Tl?function(n,t){return Tl(n,"toString",{configurable:!0,enumerable:!1,value:Sa(t),
writable:!0})}:La,js=uu,As=$l||function(n){return re.clearTimeout(n)},ks=es&&1/P(new es([,-0]))[1]==Sn?function(n){return new es(n)}:Da,Os=os?function(n){return os.get(n)}:Da,Is=Pl?function(n){return null==n?[]:(n=ll(n),i(Pl(n),function(t){return Wl.call(n,t)}))}:Pa,Rs=Pl?function(n){for(var t=[];n;)a(t,Is(n)),n=El(n);return t}:Pa,zs=we;(ns&&zs(new ns(new ArrayBuffer(1)))!=ct||ts&&zs(new ts)!=Gn||rs&&zs(rs.resolve())!=Qn||es&&zs(new es)!=tt||us&&zs(new us)!=it)&&(zs=function(n){var t=we(n),r=t==Yn?n.constructor:X,e=r?to(r):"";
if(e)switch(e){case cs:return ct;case as:return Gn;case ls:return Qn;case ss:return tt;case hs:return it}return t});var Es=yl?uc:qa,Ss=Qi(ms),Ws=Ml||function(n,t){return re.setTimeout(n,t)},Ls=Qi(xs),Cs=Pi(function(n){var t=[];return 46===n.charCodeAt(0)&&t.push(""),n.replace(Et,function(n,r,e,u){t.push(e?u.replace(Mt,"$1"):r||n)}),t}),Us=uu(function(n,t){return Jf(n)?Hr(n,ee(t,1,Jf,!0)):[]}),Bs=uu(function(n,t){var r=jo(t);return Jf(r)&&(r=X),Jf(n)?Hr(n,ee(t,1,Jf,!0),mi(r,2)):[]}),Ts=uu(function(n,t){
var r=jo(t);return Jf(r)&&(r=X),Jf(n)?Hr(n,ee(t,1,Jf,!0),X,r):[]}),$s=uu(function(n){var t=c(n,ju);return t.length&&t[0]===n[0]?ke(t):[]}),Ds=uu(function(n){var t=jo(n),r=c(n,ju);return t===jo(r)?t=X:r.pop(),r.length&&r[0]===n[0]?ke(r,mi(t,2)):[]}),Ms=uu(function(n){var t=jo(n),r=c(n,ju);return t="function"==typeof t?t:X,t&&r.pop(),r.length&&r[0]===n[0]?ke(r,X,t):[]}),Fs=uu(Oo),Ns=gi(function(n,t){var r=null==n?0:n.length,e=Tr(n,t);return nu(n,c(t,function(n){return Ci(n,r)?+n:n}).sort(Lu)),e}),Ps=uu(function(n){
return gu(ee(n,1,Jf,!0))}),qs=uu(function(n){var t=jo(n);return Jf(t)&&(t=X),gu(ee(n,1,Jf,!0),mi(t,2))}),Zs=uu(function(n){var t=jo(n);return t="function"==typeof t?t:X,gu(ee(n,1,Jf,!0),X,t)}),Ks=uu(function(n,t){return Jf(n)?Hr(n,t):[]}),Vs=uu(function(n){return mu(i(n,Jf))}),Gs=uu(function(n){var t=jo(n);return Jf(t)&&(t=X),mu(i(n,Jf),mi(t,2))}),Hs=uu(function(n){var t=jo(n);return t="function"==typeof t?t:X,mu(i(n,Jf),X,t)}),Js=uu(Go),Ys=uu(function(n){var t=n.length,r=t>1?n[t-1]:X;return r="function"==typeof r?(n.pop(),
r):X,Ho(n,r)}),Qs=gi(function(n){var t=n.length,r=t?n[0]:0,e=this.__wrapped__,u=function(t){return Tr(t,n)};return!(t>1||this.__actions__.length)&&e instanceof Ct&&Ci(r)?(e=e.slice(r,+r+(t?1:0)),e.__actions__.push({func:nf,args:[u],thisArg:X}),new Y(e,this.__chain__).thru(function(n){return t&&!n.length&&n.push(X),n})):this.thru(u)}),Xs=Fu(function(n,t,r){bl.call(n,r)?++n[r]:Br(n,r,1)}),nh=Ju(ho),th=Ju(po),rh=Fu(function(n,t,r){bl.call(n,r)?n[r].push(t):Br(n,r,[t])}),eh=uu(function(t,r,e){var u=-1,i="function"==typeof r,o=Hf(t)?il(t.length):[];
return ys(t,function(t){o[++u]=i?n(r,t,e):Ie(t,r,e)}),o}),uh=Fu(function(n,t,r){Br(n,r,t)}),ih=Fu(function(n,t,r){n[r?0:1].push(t)},function(){return[[],[]]}),oh=uu(function(n,t){if(null==n)return[];var r=t.length;return r>1&&Ui(n,t[0],t[1])?t=[]:r>2&&Ui(t[0],t[1],t[2])&&(t=[t[0]]),He(n,ee(t,1),[])}),fh=Dl||function(){return re.Date.now()},ch=uu(function(n,t,r){var e=_n;if(r.length){var u=N(r,wi(ch));e|=bn}return ai(n,e,t,r,u)}),ah=uu(function(n,t,r){var e=_n|vn;if(r.length){var u=N(r,wi(ah));e|=bn;
}return ai(t,e,n,r,u)}),lh=uu(function(n,t){return Gr(n,1,t)}),sh=uu(function(n,t,r){return Gr(n,Ic(t)||0,r)});Cf.Cache=sr;var hh=js(function(t,r){r=1==r.length&&bh(r[0])?c(r[0],z(mi())):c(ee(r,1),z(mi()));var e=r.length;return uu(function(u){for(var i=-1,o=Hl(u.length,e);++i<o;)u[i]=r[i].call(this,u[i]);return n(t,this,u)})}),ph=uu(function(n,t){return ai(n,bn,X,t,N(t,wi(ph)))}),_h=uu(function(n,t){return ai(n,wn,X,t,N(t,wi(_h)))}),vh=gi(function(n,t){return ai(n,xn,X,X,X,t)}),gh=ii(me),yh=ii(function(n,t){
return n>=t}),dh=Re(function(){return arguments}())?Re:function(n){return cc(n)&&bl.call(n,"callee")&&!Wl.call(n,"callee")},bh=il.isArray,wh=ce?z(ce):ze,mh=ql||qa,xh=ae?z(ae):Ee,jh=le?z(le):Le,Ah=se?z(se):Be,kh=he?z(he):Te,Oh=pe?z(pe):$e,Ih=ii(Ne),Rh=ii(function(n,t){return n<=t}),zh=Nu(function(n,t){if(Mi(t)||Hf(t))return $u(t,Pc(t),n),X;for(var r in t)bl.call(t,r)&&Sr(n,r,t[r])}),Eh=Nu(function(n,t){$u(t,qc(t),n)}),Sh=Nu(function(n,t,r,e){$u(t,qc(t),n,e)}),Wh=Nu(function(n,t,r,e){$u(t,Pc(t),n,e);
}),Lh=gi(Tr),Ch=uu(function(n,t){n=ll(n);var r=-1,e=t.length,u=e>2?t[2]:X;for(u&&Ui(t[0],t[1],u)&&(e=1);++r<e;)for(var i=t[r],o=qc(i),f=-1,c=o.length;++f<c;){var a=o[f],l=n[a];(l===X||Gf(l,gl[a])&&!bl.call(n,a))&&(n[a]=i[a])}return n}),Uh=uu(function(t){return t.push(X,si),n(Mh,X,t)}),Bh=Xu(function(n,t,r){null!=t&&"function"!=typeof t.toString&&(t=xl.call(t)),n[t]=r},Sa(La)),Th=Xu(function(n,t,r){null!=t&&"function"!=typeof t.toString&&(t=xl.call(t)),bl.call(n,t)?n[t].push(r):n[t]=[r]},mi),$h=uu(Ie),Dh=Nu(function(n,t,r){
Ke(n,t,r)}),Mh=Nu(function(n,t,r,e){Ke(n,t,r,e)}),Fh=gi(function(n,t){var r={};if(null==n)return r;var e=!1;t=c(t,function(t){return t=ku(t,n),e||(e=t.length>1),t}),$u(n,di(n),r),e&&(r=Fr(r,an|ln|sn,hi));for(var u=t.length;u--;)yu(r,t[u]);return r}),Nh=gi(function(n,t){return null==n?{}:Je(n,t)}),Ph=ci(Pc),qh=ci(qc),Zh=Vu(function(n,t,r){return t=t.toLowerCase(),n+(r?fa(t):t)}),Kh=Vu(function(n,t,r){return n+(r?"-":"")+t.toLowerCase()}),Vh=Vu(function(n,t,r){return n+(r?" ":"")+t.toLowerCase()}),Gh=Ku("toLowerCase"),Hh=Vu(function(n,t,r){
return n+(r?"_":"")+t.toLowerCase()}),Jh=Vu(function(n,t,r){return n+(r?" ":"")+Qh(t)}),Yh=Vu(function(n,t,r){return n+(r?" ":"")+t.toUpperCase()}),Qh=Ku("toUpperCase"),Xh=uu(function(t,r){try{return n(t,X,r)}catch(n){return rc(n)?n:new fl(n)}}),np=gi(function(n,t){return r(t,function(t){t=no(t),Br(n,t,ch(n[t],n))}),n}),tp=Yu(),rp=Yu(!0),ep=uu(function(n,t){return function(r){return Ie(r,n,t)}}),up=uu(function(n,t){return function(r){return Ie(n,r,t)}}),ip=ti(c),op=ti(u),fp=ti(h),cp=ui(),ap=ui(!0),lp=ni(function(n,t){
return n+t},0),sp=fi("ceil"),hp=ni(function(n,t){return n/t},1),pp=fi("floor"),_p=ni(function(n,t){return n*t},1),vp=fi("round"),gp=ni(function(n,t){return n-t},0);return Z.after=If,Z.ary=Rf,Z.assign=zh,Z.assignIn=Eh,Z.assignInWith=Sh,Z.assignWith=Wh,Z.at=Lh,Z.before=zf,Z.bind=ch,Z.bindAll=np,Z.bindKey=ah,Z.castArray=Nf,Z.chain=Qo,Z.chunk=uo,Z.compact=io,Z.concat=oo,Z.cond=za,Z.conforms=Ea,Z.constant=Sa,Z.countBy=Xs,Z.create=Sc,Z.curry=Ef,Z.curryRight=Sf,Z.debounce=Wf,Z.defaults=Ch,Z.defaultsDeep=Uh,
Z.defer=lh,Z.delay=sh,Z.difference=Us,Z.differenceBy=Bs,Z.differenceWith=Ts,Z.drop=fo,Z.dropRight=co,Z.dropRightWhile=ao,Z.dropWhile=lo,Z.fill=so,Z.filter=lf,Z.flatMap=sf,Z.flatMapDeep=hf,Z.flatMapDepth=pf,Z.flatten=_o,Z.flattenDeep=vo,Z.flattenDepth=go,Z.flip=Lf,Z.flow=tp,Z.flowRight=rp,Z.fromPairs=yo,Z.functions=$c,Z.functionsIn=Dc,Z.groupBy=rh,Z.initial=mo,Z.intersection=$s,Z.intersectionBy=Ds,Z.intersectionWith=Ms,Z.invert=Bh,Z.invertBy=Th,Z.invokeMap=eh,Z.iteratee=Ca,Z.keyBy=uh,Z.keys=Pc,Z.keysIn=qc,
Z.map=yf,Z.mapKeys=Zc,Z.mapValues=Kc,Z.matches=Ua,Z.matchesProperty=Ba,Z.memoize=Cf,Z.merge=Dh,Z.mergeWith=Mh,Z.method=ep,Z.methodOf=up,Z.mixin=Ta,Z.negate=Uf,Z.nthArg=Ma,Z.omit=Fh,Z.omitBy=Vc,Z.once=Bf,Z.orderBy=df,Z.over=ip,Z.overArgs=hh,Z.overEvery=op,Z.overSome=fp,Z.partial=ph,Z.partialRight=_h,Z.partition=ih,Z.pick=Nh,Z.pickBy=Gc,Z.property=Fa,Z.propertyOf=Na,Z.pull=Fs,Z.pullAll=Oo,Z.pullAllBy=Io,Z.pullAllWith=Ro,Z.pullAt=Ns,Z.range=cp,Z.rangeRight=ap,Z.rearg=vh,Z.reject=mf,Z.remove=zo,Z.rest=Tf,
Z.reverse=Eo,Z.sampleSize=jf,Z.set=Jc,Z.setWith=Yc,Z.shuffle=Af,Z.slice=So,Z.sortBy=oh,Z.sortedUniq=$o,Z.sortedUniqBy=Do,Z.split=da,Z.spread=$f,Z.tail=Mo,Z.take=Fo,Z.takeRight=No,Z.takeRightWhile=Po,Z.takeWhile=qo,Z.tap=Xo,Z.throttle=Df,Z.thru=nf,Z.toArray=jc,Z.toPairs=Ph,Z.toPairsIn=qh,Z.toPath=Ha,Z.toPlainObject=Rc,Z.transform=Qc,Z.unary=Mf,Z.union=Ps,Z.unionBy=qs,Z.unionWith=Zs,Z.uniq=Zo,Z.uniqBy=Ko,Z.uniqWith=Vo,Z.unset=Xc,Z.unzip=Go,Z.unzipWith=Ho,Z.update=na,Z.updateWith=ta,Z.values=ra,Z.valuesIn=ea,
Z.without=Ks,Z.words=Ra,Z.wrap=Ff,Z.xor=Vs,Z.xorBy=Gs,Z.xorWith=Hs,Z.zip=Js,Z.zipObject=Jo,Z.zipObjectDeep=Yo,Z.zipWith=Ys,Z.entries=Ph,Z.entriesIn=qh,Z.extend=Eh,Z.extendWith=Sh,Ta(Z,Z),Z.add=lp,Z.attempt=Xh,Z.camelCase=Zh,Z.capitalize=fa,Z.ceil=sp,Z.clamp=ua,Z.clone=Pf,Z.cloneDeep=Zf,Z.cloneDeepWith=Kf,Z.cloneWith=qf,Z.conformsTo=Vf,Z.deburr=ca,Z.defaultTo=Wa,Z.divide=hp,Z.endsWith=aa,Z.eq=Gf,Z.escape=la,Z.escapeRegExp=sa,Z.every=af,Z.find=nh,Z.findIndex=ho,Z.findKey=Wc,Z.findLast=th,Z.findLastIndex=po,
Z.findLastKey=Lc,Z.floor=pp,Z.forEach=_f,Z.forEachRight=vf,Z.forIn=Cc,Z.forInRight=Uc,Z.forOwn=Bc,Z.forOwnRight=Tc,Z.get=Mc,Z.gt=gh,Z.gte=yh,Z.has=Fc,Z.hasIn=Nc,Z.head=bo,Z.identity=La,Z.includes=gf,Z.indexOf=wo,Z.inRange=ia,Z.invoke=$h,Z.isArguments=dh,Z.isArray=bh,Z.isArrayBuffer=wh,Z.isArrayLike=Hf,Z.isArrayLikeObject=Jf,Z.isBoolean=Yf,Z.isBuffer=mh,Z.isDate=xh,Z.isElement=Qf,Z.isEmpty=Xf,Z.isEqual=nc,Z.isEqualWith=tc,Z.isError=rc,Z.isFinite=ec,Z.isFunction=uc,Z.isInteger=ic,Z.isLength=oc,Z.isMap=jh,
Z.isMatch=ac,Z.isMatchWith=lc,Z.isNaN=sc,Z.isNative=hc,Z.isNil=_c,Z.isNull=pc,Z.isNumber=vc,Z.isObject=fc,Z.isObjectLike=cc,Z.isPlainObject=gc,Z.isRegExp=Ah,Z.isSafeInteger=yc,Z.isSet=kh,Z.isString=dc,Z.isSymbol=bc,Z.isTypedArray=Oh,Z.isUndefined=wc,Z.isWeakMap=mc,Z.isWeakSet=xc,Z.join=xo,Z.kebabCase=Kh,Z.last=jo,Z.lastIndexOf=Ao,Z.lowerCase=Vh,Z.lowerFirst=Gh,Z.lt=Ih,Z.lte=Rh,Z.max=Ya,Z.maxBy=Qa,Z.mean=Xa,Z.meanBy=nl,Z.min=tl,Z.minBy=rl,Z.stubArray=Pa,Z.stubFalse=qa,Z.stubObject=Za,Z.stubString=Ka,
Z.stubTrue=Va,Z.multiply=_p,Z.nth=ko,Z.noConflict=$a,Z.noop=Da,Z.now=fh,Z.pad=ha,Z.padEnd=pa,Z.padStart=_a,Z.parseInt=va,Z.random=oa,Z.reduce=bf,Z.reduceRight=wf,Z.repeat=ga,Z.replace=ya,Z.result=Hc,Z.round=vp,Z.runInContext=p,Z.sample=xf,Z.size=kf,Z.snakeCase=Hh,Z.some=Of,Z.sortedIndex=Wo,Z.sortedIndexBy=Lo,Z.sortedIndexOf=Co,Z.sortedLastIndex=Uo,Z.sortedLastIndexBy=Bo,Z.sortedLastIndexOf=To,Z.startCase=Jh,Z.startsWith=ba,Z.subtract=gp,Z.sum=el,Z.sumBy=ul,Z.template=wa,Z.times=Ga,Z.toFinite=Ac,Z.toInteger=kc,
Z.toLength=Oc,Z.toLower=ma,Z.toNumber=Ic,Z.toSafeInteger=zc,Z.toString=Ec,Z.toUpper=xa,Z.trim=ja,Z.trimEnd=Aa,Z.trimStart=ka,Z.truncate=Oa,Z.unescape=Ia,Z.uniqueId=Ja,Z.upperCase=Yh,Z.upperFirst=Qh,Z.each=_f,Z.eachRight=vf,Z.first=bo,Ta(Z,function(){var n={};return ue(Z,function(t,r){bl.call(Z.prototype,r)||(n[r]=t)}),n}(),{chain:!1}),Z.VERSION=nn,r(["bind","bindKey","curry","curryRight","partial","partialRight"],function(n){Z[n].placeholder=Z}),r(["drop","take"],function(n,t){Ct.prototype[n]=function(r){
r=r===X?1:Gl(kc(r),0);var e=this.__filtered__&&!t?new Ct(this):this.clone();return e.__filtered__?e.__takeCount__=Hl(r,e.__takeCount__):e.__views__.push({size:Hl(r,Un),type:n+(e.__dir__<0?"Right":"")}),e},Ct.prototype[n+"Right"]=function(t){return this.reverse()[n](t).reverse()}}),r(["filter","map","takeWhile"],function(n,t){var r=t+1,e=r==Rn||r==En;Ct.prototype[n]=function(n){var t=this.clone();return t.__iteratees__.push({iteratee:mi(n,3),type:r}),t.__filtered__=t.__filtered__||e,t}}),r(["head","last"],function(n,t){
var r="take"+(t?"Right":"");Ct.prototype[n]=function(){return this[r](1).value()[0]}}),r(["initial","tail"],function(n,t){var r="drop"+(t?"":"Right");Ct.prototype[n]=function(){return this.__filtered__?new Ct(this):this[r](1)}}),Ct.prototype.compact=function(){return this.filter(La)},Ct.prototype.find=function(n){return this.filter(n).head()},Ct.prototype.findLast=function(n){return this.reverse().find(n)},Ct.prototype.invokeMap=uu(function(n,t){return"function"==typeof n?new Ct(this):this.map(function(r){
return Ie(r,n,t)})}),Ct.prototype.reject=function(n){return this.filter(Uf(mi(n)))},Ct.prototype.slice=function(n,t){n=kc(n);var r=this;return r.__filtered__&&(n>0||t<0)?new Ct(r):(n<0?r=r.takeRight(-n):n&&(r=r.drop(n)),t!==X&&(t=kc(t),r=t<0?r.dropRight(-t):r.take(t-n)),r)},Ct.prototype.takeRightWhile=function(n){return this.reverse().takeWhile(n).reverse()},Ct.prototype.toArray=function(){return this.take(Un)},ue(Ct.prototype,function(n,t){var r=/^(?:filter|find|map|reject)|While$/.test(t),e=/^(?:head|last)$/.test(t),u=Z[e?"take"+("last"==t?"Right":""):t],i=e||/^find/.test(t);
u&&(Z.prototype[t]=function(){var t=this.__wrapped__,o=e?[1]:arguments,f=t instanceof Ct,c=o[0],l=f||bh(t),s=function(n){var t=u.apply(Z,a([n],o));return e&&h?t[0]:t};l&&r&&"function"==typeof c&&1!=c.length&&(f=l=!1);var h=this.__chain__,p=!!this.__actions__.length,_=i&&!h,v=f&&!p;if(!i&&l){t=v?t:new Ct(this);var g=n.apply(t,o);return g.__actions__.push({func:nf,args:[s],thisArg:X}),new Y(g,h)}return _&&v?n.apply(this,o):(g=this.thru(s),_?e?g.value()[0]:g.value():g)})}),r(["pop","push","shift","sort","splice","unshift"],function(n){
var t=_l[n],r=/^(?:push|sort|unshift)$/.test(n)?"tap":"thru",e=/^(?:pop|shift)$/.test(n);Z.prototype[n]=function(){var n=arguments;if(e&&!this.__chain__){var u=this.value();return t.apply(bh(u)?u:[],n)}return this[r](function(r){return t.apply(bh(r)?r:[],n)})}}),ue(Ct.prototype,function(n,t){var r=Z[t];if(r){var e=r.name+"";bl.call(fs,e)||(fs[e]=[]),fs[e].push({name:t,func:r})}}),fs[Qu(X,vn).name]=[{name:"wrapper",func:X}],Ct.prototype.clone=$t,Ct.prototype.reverse=Yt,Ct.prototype.value=Qt,Z.prototype.at=Qs,
Z.prototype.chain=tf,Z.prototype.commit=rf,Z.prototype.next=ef,Z.prototype.plant=of,Z.prototype.reverse=ff,Z.prototype.toJSON=Z.prototype.valueOf=Z.prototype.value=cf,Z.prototype.first=Z.prototype.head,Ul&&(Z.prototype[Ul]=uf),Z},be=de();"function"==typeof define&&"object"==typeof define.amd&&define.amd?(re._=be,define(function(){return be})):ue?((ue.exports=be)._=be,ee._=be):re._=be}).call(this); |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,757 | [Bug][UI Next][V1.0.0-Alpha] There isn't the text button of the help document in the task form. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
In the task form there isn't the text button of the help document for this type of task. But in the old UI, you can use this text button to access the help document when clicking the button.
The task form in the old UI seems like:
![image](https://user-images.githubusercontent.com/4928204/157206702-f3fcc62b-d601-4b0c-9a64-0eca84e3736d.png)
The task form in the new UI seems like:
![image](https://user-images.githubusercontent.com/4928204/157206713-5b58c180-cbc3-4629-a951-d37bd54707f6.png)
### What you expected to happen
I expect that I can use the text button to access the help document .
### How to reproduce
You can see the task form in the new UI.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8757 | https://github.com/apache/dolphinscheduler/pull/8775 | 59410af0aeb3eeca6eed0395050c1159a8c4dc6c | 27c5944c2d7111a731eb42d57097ba4a9def2fe9 | "2022-03-08T09:22:11Z" | java | "2022-03-09T10:15:07Z" | dolphinscheduler-ui-next/src/views/projects/task/components/node/detail-modal.tsx | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {
defineComponent,
PropType,
ref,
reactive,
watch,
nextTick,
provide,
computed,
h
} from 'vue'
import { useI18n } from 'vue-i18n'
import Modal from '@/components/modal'
import Detail from './detail'
import { formatModel } from './format-data'
import type { ITaskData, ITaskType } from './types'
import { HistoryOutlined, ProfileOutlined } from '@vicons/antd'
import { NIcon } from 'naive-ui'
import { Router, useRouter } from 'vue-router'
import { IWorkflowTaskInstance } from '@/views/projects/workflow/components/dag/types'
const props = {
show: {
type: Boolean as PropType<boolean>,
default: false
},
data: {
type: Object as PropType<ITaskData>,
default: { code: 0, taskType: 'SHELL', name: '' }
},
projectCode: {
type: Number as PropType<number>,
required: true
},
readonly: {
type: Boolean as PropType<boolean>,
default: false
},
from: {
type: Number as PropType<number>,
default: 0
},
processInstance: {
type: Object as PropType<any>
},
taskInstance: {
type: Object as PropType<IWorkflowTaskInstance>
}
}
/**
 * NodeDetailModal — modal dialog for viewing and editing the settings of a
 * single workflow task node.
 *
 * Emits:
 *  - 'submit'  ({ data }) — validated values collected from the embedded
 *    <Detail> form
 *  - 'cancel'             — user dismissed the modal
 *  - 'viewLog' (id, taskType) — user requested the log of the task instance
 */
const NodeDetailModal = defineComponent({
  name: 'NodeDetailModal',
  props,
  emits: ['cancel', 'submit', 'viewLog'],
  setup(props, { emit }) {
    const { t } = useI18n()
    const router: Router = useRouter()
    // Wrap an icon component in NIcon so it can be used as a lazy icon
    // renderer for the header links below.
    const renderIcon = (icon: any) => {
      return () => h(NIcon, null, { default: () => h(icon) })
    }
    // Ref to the <Detail> form component. Detail apparently exposes its own
    // inner ref, hence the double ".value" when calling
    // validate()/getValues()/setValues() — TODO confirm against Detail.
    const detailRef = ref()
    const onConfirm = async () => {
      // Validate first; a rejected validation aborts the submit.
      await detailRef.value.value.validate()
      emit('submit', { data: detailRef.value.value.getValues() })
    }
    const onCancel = () => {
      emit('cancel')
    }
    // Links rendered in the modal header: "view history" and, when a task
    // instance is present, "view log".
    const headerLinks = ref([] as any)
    const handleViewLog = () => {
      if (props.taskInstance) {
        emit('viewLog', props.taskInstance.id, props.taskInstance.taskType)
      }
    }
    // (Re)build the header links for the given process instance.
    const initHeaderLinks = (processInstance: any) => {
      headerLinks.value = [
        {
          text: t('project.node.view_history'),
          show: true,
          action: () => {
            // Jump to the task-instance list scoped to this process instance.
            router.push({
              name: 'task-instance',
              params: { processInstanceId: processInstance.id }
            })
          },
          icon: renderIcon(HistoryOutlined)
        },
        {
          text: t('project.node.view_log'),
          // The log link only makes sense for a concrete task instance.
          show: props.taskInstance ? true : false,
          action: () => {
            handleViewLog()
          },
          icon: renderIcon(ProfileOutlined)
        }
      ]
    }
    // Keep the bound node data in sync when the user switches the task type.
    // NOTE(review): this mutates a prop object in place — presumably the
    // parent shares this reference intentionally; confirm with the caller.
    const onTaskTypeChange = (taskType: ITaskType) => {
      props.data.taskType = taskType
    }
    // Expose the editing context (project, node data, origin, readonly flag)
    // to nested task-form components via provide/inject.
    provide(
      'data',
      computed(() => ({
        projectCode: props.projectCode,
        data: props.data,
        from: props.from,
        readonly: props.readonly
      }))
    )
    // When the modal is shown (or its data changes): rebuild the header
    // links, then push the formatted node model into the Detail form.
    // nextTick ensures detailRef is mounted before setValues is invoked.
    watch(
      () => [props.show, props.data],
      async () => {
        if (!props.show) return
        if (props.processInstance) {
          initHeaderLinks(props.processInstance)
        }
        await nextTick()
        detailRef.value.value.setValues(formatModel(props.data))
      }
    )
    // NOTE(review): headerLinks is passed as a ref (not headerLinks.value);
    // presumably Modal unwraps it internally — verify against Modal's props.
    return () => (
      <Modal
        show={props.show}
        title={`${t('project.node.current_node_settings')}`}
        onConfirm={onConfirm}
        confirmLoading={false}
        confirmDisabled={props.readonly}
        onCancel={onCancel}
        headerLinks={headerLinks}
      >
        <Detail
          ref={detailRef}
          onTaskTypeChange={onTaskTypeChange}
          key={props.data.taskType}
        />
      </Modal>
    )
  }
})

export default NodeDetailModal
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,675 | [Feature][Python] Add LICENSE and NOTICE to Python API | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add LICENSE and NOTICE to the Python API
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8675 | https://github.com/apache/dolphinscheduler/pull/8768 | 27c5944c2d7111a731eb42d57097ba4a9def2fe9 | 62e12c73dc28dd0b671bf09db33bf9a4f8aedbe3 | "2022-03-03T05:39:03Z" | java | "2022-03-09T11:50:43Z" | .github/CODEOWNERS | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
dolphinscheduler/dolphinscheduler-alert @kezhenxu94
dolphinscheduler/dolphinscheduler-e2e @kezhenxu94
dolphinscheduler/dolphinscheduler-registry @kezhenxu94
dolphinscheduler/dolphinscheduler-standalone-server @kezhenxu94
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,675 | [Feature][Python] Add LICENSE and NOTICE to Python API | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add LICENSE and NOTICE to python API
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8675 | https://github.com/apache/dolphinscheduler/pull/8768 | 27c5944c2d7111a731eb42d57097ba4a9def2fe9 | 62e12c73dc28dd0b671bf09db33bf9a4f8aedbe3 | "2022-03-03T05:39:03Z" | java | "2022-03-09T11:50:43Z" | dolphinscheduler-dist/release-docs/LICENSE | Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=======================================================================
Apache DolphinScheduler Subcomponents:
The Apache DolphinScheduler project contains subcomponents with separate copyright
notices and license terms. Your use of the source code for the these
subcomponents is subject to the terms and conditions of the following
licenses.
========================================================================
Apache 2.0 licenses
========================================================================
The following components are provided under the Apache License. See project link for details.
The text of each license is also included at licenses/LICENSE-[project].txt.
apacheds-i18n 2.0.0-M15: https://mvnrepository.com/artifact/org.apache.directory.server/apacheds-i18n/2.0.0-M15, Apache 2.0
apacheds-kerberos-codec 2.0.0-M15: https://mvnrepository.com/artifact/org.apache.directory.server/apacheds-kerberos-codec/2.0.0-M15, Apache 2.0
tomcat-embed-el 9.0.54: https://mvnrepository.com/artifact/org.apache.tomcat.embed/tomcat-embed-el/9.0.54, Apache 2.0
api-asn1-api 1.0.0-M20: https://mvnrepository.com/artifact/org.apache.directory.api/api-asn1-api/1.0.0-M20, Apache 2.0
api-util 1.0.0-M20: https://mvnrepository.com/artifact/org.apache.directory.api/api-util/1.0.0-M20, Apache 2.0
audience-annotations 0.5.0: https://mvnrepository.com/artifact/org.apache.yetus/audience-annotations/0.5.0, Apache 2.0
avro 1.7.4: https://github.com/apache/avro, Apache 2.0
aws-sdk-java 1.7.4: https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk/1.7.4, Apache 2.0
bonecp 0.8.0.RELEASE: https://github.com/wwadge/bonecp, Apache 2.0
byte-buddy 1.9.16: https://mvnrepository.com/artifact/net.bytebuddy/byte-buddy/1.9.16, Apache 2.0
caffeine 2.9.2: https://mvnrepository.com/artifact/com.github.ben-manes.caffeine/caffeine/2.9.2, Apache 2.0
classmate 1.5.1: https://mvnrepository.com/artifact/com.fasterxml/classmate/1.5.1, Apache 2.0
clickhouse-jdbc 0.1.52: https://mvnrepository.com/artifact/ru.yandex.clickhouse/clickhouse-jdbc/0.1.52, Apache 2.0
commons-beanutils 1.9.4 https://mvnrepository.com/artifact/commons-beanutils/commons-beanutils/1.9.4, Apache 2.0
commons-cli 1.2: https://mvnrepository.com/artifact/commons-cli/commons-cli/1.2, Apache 2.0
commons-codec 1.11: https://mvnrepository.com/artifact/commons-codec/commons-codec/1.11, Apache 2.0
commons-collections 3.2.2: https://mvnrepository.com/artifact/commons-collections/commons-collections/3.2.2, Apache 2.0
commons-collections4 4.1: https://mvnrepository.com/artifact/org.apache.commons/commons-collections4/4.1, Apache 2.0
commons-compress 1.19: https://mvnrepository.com/artifact/org.apache.commons/commons-compress/1.19, Apache 2.0
commons-configuration 1.10: https://mvnrepository.com/artifact/commons-configuration/commons-configuration/1.10, Apache 2.0
commons-daemon 1.0.13 https://mvnrepository.com/artifact/commons-daemon/commons-daemon/1.0.13, Apache 2.0
commons-dbcp 1.4: https://github.com/apache/commons-dbcp, Apache 2.0
commons-email 1.5: https://github.com/apache/commons-email, Apache 2.0
commons-httpclient 3.0.1: https://mvnrepository.com/artifact/commons-httpclient/commons-httpclient/3.0.1, Apache 2.0
commons-io 2.4: https://github.com/apache/commons-io, Apache 2.0
commons-lang 2.6: https://github.com/apache/commons-lang, Apache 2.0
commons-logging 1.1.1: https://github.com/apache/commons-logging, Apache 2.0
commons-math3 3.1.1: https://mvnrepository.com/artifact/org.apache.commons/commons-math3/3.1.1, Apache 2.0
commons-net 3.1: https://github.com/apache/commons-net, Apache 2.0
commons-pool 1.6: https://github.com/apache/commons-pool, Apache 2.0
cron-utils 9.1.3: https://mvnrepository.com/artifact/com.cronutils/cron-utils/9.1.3, Apache 2.0
commons-lang3 3.12.0: https://mvnrepository.com/artifact/org.apache.commons/commons-lang3/3.12.0, Apache 2.0
curator-client 4.3.0: https://mvnrepository.com/artifact/org.apache.curator/curator-client/4.3.0, Apache 2.0
curator-framework 4.3.0: https://mvnrepository.com/artifact/org.apache.curator/curator-framework/4.3.0, Apache 2.0
curator-recipes 4.3.0: https://mvnrepository.com/artifact/org.apache.curator/curator-recipes/4.3.0, Apache 2.0
curator-test 2.12.0: https://mvnrepository.com/artifact/org.apache.curator/curator-test/2.12.0, Apache 2.0
datanucleus-api-jdo 4.2.1: https://mvnrepository.com/artifact/org.datanucleus/datanucleus-api-jdo/4.2.1, Apache 2.0
datanucleus-core 4.1.6: https://mvnrepository.com/artifact/org.datanucleus/datanucleus-core/4.1.6, Apache 2.0
datanucleus-rdbms 4.1.7: https://mvnrepository.com/artifact/org.datanucleus/datanucleus-rdbms/4.1.7, Apache 2.0
derby 10.14.2.0: https://github.com/apache/derby, Apache 2.0
druid 1.1.14: https://mvnrepository.com/artifact/com.alibaba/druid/1.1.14, Apache 2.0
error_prone_annotations 2.1.3 https://mvnrepository.com/artifact/com.google.errorprone/error_prone_annotations/2.1.3, Apache 2.0
gson 2.8.8: https://github.com/google/gson, Apache 2.0
guava 24.1-jre: https://mvnrepository.com/artifact/com.google.guava/guava/24.1-jre, Apache 2.0
guava-retrying 2.0.0: https://mvnrepository.com/artifact/com.github.rholder/guava-retrying/2.0.0, Apache 2.0
guice 3.0: https://mvnrepository.com/artifact/com.google.inject/guice/3.0, Apache 2.0
guice-servlet 3.0: https://mvnrepository.com/artifact/com.google.inject.extensions/guice-servlet/3.0, Apache 2.0
hadoop-annotations 2.7.3:https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-annotations/2.7.3, Apache 2.0
hadoop-auth 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-auth/2.7.3, Apache 2.0
hadoop-aws 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-aws/2.7.3, Apache 2.0
hadoop-client 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-client/2.7.3, Apache 2.0
hadoop-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-common/2.7.3, Apache 2.0
hadoop-hdfs 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-hdfs/2.7.3, Apache 2.0
hadoop-mapreduce-client-app 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-app/2.7.3, Apache 2.0
hadoop-mapreduce-client-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-common/2.7.3, Apache 2.0
hadoop-mapreduce-client-core 2.7.3: https://mvnrepository.com/artifact/io.hops/hadoop-mapreduce-client-core/2.7.3, Apache 2.0
hadoop-mapreduce-client-jobclient 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-jobclient/2.7.3, Apache 2.0
hadoop-mapreduce-client-shuffle 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-shuffle/2.7.3, Apache 2.0
hadoop-yarn-api 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-api/2.7.3, Apache 2.0
hadoop-yarn-client 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-client/2.7.3, Apache 2.0
hadoop-yarn-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-common/2.7.3, Apache 2.0
hadoop-yarn-server-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-server-common/2.7.3, Apache 2.0
HikariCP 4.0.3: https://mvnrepository.com/artifact/com.zaxxer/HikariCP/4.0.3, Apache 2.0
hive-common 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-common/2.1.0, Apache 2.0
hive-jdbc 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-jdbc/2.1.0, Apache 2.0
hive-metastore 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-metastore/2.1.0, Apache 2.0
hive-orc 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-orc/2.1.0, Apache 2.0
hive-serde 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-serde/2.1.0, Apache 2.0
hive-service 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-service/2.1.0, Apache 2.0
hive-service-rpc 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-service-rpc/2.1.0, Apache 2.0
hive-storage-api 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-storage-api/2.1.0, Apache 2.0
htrace-core 3.1.0-incubating: https://mvnrepository.com/artifact/org.apache.htrace/htrace-core/3.1.0-incubating, Apache 2.0
httpclient 4.4.1: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpclient/4.4.1, Apache 2.0
httpcore 4.4.1: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpcore/4.4.1, Apache 2.0
httpmime 4.5.13: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpmime/4.5.13, Apache 2.0
jackson-annotations 2.10.5: https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-annotations/2.10.5, Apache 2.0
jackson-core 2.10.5: https://github.com/FasterXML/jackson-core, Apache 2.0
jackson-core-asl 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-core-asl/1.9.13, Apache 2.0
jackson-databind 2.10.5: https://github.com/FasterXML/jackson-databind, Apache 2.0
jackson-datatype-jdk8 2.12.5: https://mvnrepository.com/artifact/com.fasterxml.jackson.datatype/jackson-datatype-jdk8/2.12.5, Apache 2.0
jackson-datatype-jsr310 2.12.5: https://mvnrepository.com/artifact/com.fasterxml.jackson.datatype/jackson-datatype-jsr310/2.12.5, Apache 2.0
jackson-jaxrs 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-jaxrs/1.9.13, Apache 2.0 and LGPL 2.1
jackson-mapper-asl 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-mapper-asl/1.9.13, Apache 2.0
jackson-module-parameter-names 2.12.5: https://mvnrepository.com/artifact/com.fasterxml.jackson.module/jackson-module-parameter-names/2.12.5, Apache 2.0
jackson-xc 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-xc/1.9.13, Apache 2.0 and LGPL 2.1
javax.inject 1: https://mvnrepository.com/artifact/javax.inject/javax.inject/1, Apache 2.0
javax.jdo-3.2.0-m3: https://mvnrepository.com/artifact/org.datanucleus/javax.jdo/3.2.0-m3, Apache 2.0
java-xmlbuilder 0.4 : https://mvnrepository.com/artifact/com.jamesmurty.utils/java-xmlbuilder/0.4, Apache 2.0
jdo-api 3.0.1: https://mvnrepository.com/artifact/javax.jdo/jdo-api/3.0.1, Apache 2.0
jets3t 0.9.0: https://mvnrepository.com/artifact/net.java.dev.jets3t/jets3t/0.9.0, Apache 2.0
jettison 1.1: https://github.com/jettison-json/jettison, Apache 2.0
jetty 6.1.26: https://mvnrepository.com/artifact/org.mortbay.jetty/jetty/6.1.26, Apache 2.0 and EPL 1.0
jetty-continuation 9.4.44.v20210927: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-continuation/9.4.44.v20210927, Apache 2.0 and EPL 1.0
jetty-http 9.4.44.v20210927: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-http/9.4.44.v20210927, Apache 2.0 and EPL 1.0
jetty-io 9.4.44.v20210927: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-io/9.4.44.v20210927, Apache 2.0 and EPL 1.0
jetty-security 9.4.44.v20210927: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-security/9.4.44.v20210927, Apache 2.0 and EPL 1.0
jetty-server 9.4.44.v20210927: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-server/9.4.44.v20210927, Apache 2.0 and EPL 1.0
jetty-servlet 9.4.44.v20210927: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-servlet/9.4.44.v20210927, Apache 2.0 and EPL 1.0
jetty-servlets 9.4.44.v20210927: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-servlets/9.4.44.v20210927, Apache 2.0 and EPL 1.0
jetty-util 6.1.26: https://mvnrepository.com/artifact/org.mortbay.jetty/jetty-util/6.1.26, Apache 2.0 and EPL 1.0
jetty-util 9.4.44.v20210927: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-util/9.4.44.v20210927, Apache 2.0 and EPL 1.0
jetty-util-ajax 9.4.44.v20210927: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-util-ajax/9.4.44.v20210927, Apache 2.0 and EPL 1.0
jetty-webapp 9.4.44.v20210927: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-webapp/9.4.44.v20210927, Apache 2.0 and EPL 1.0
jetty-xml 9.4.44.v20210927: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-xml/9.4.44.v20210927, Apache 2.0 and EPL 1.0
jna 5.10.0: https://mvnrepository.com/artifact/net.java.dev.jna/jna/5.10.0, Apache 2.0 and LGPL 2.1
jna-platform 5.10.0: https://mvnrepository.com/artifact/net.java.dev.jna/jna-platform/5.10.0, Apache 2.0 and LGPL 2.1
joda-time 2.10.13: https://github.com/JodaOrg/joda-time, Apache 2.0
jpam 1.1: https://mvnrepository.com/artifact/net.sf.jpam/jpam/1.1, Apache 2.0
jsqlparser 2.1: https://github.com/JSQLParser/JSqlParser, Apache 2.0 or LGPL 2.1
jsr305 3.0.0: https://mvnrepository.com/artifact/com.google.code.findbugs/jsr305, Apache 2.0
j2objc-annotations 1.1 https://mvnrepository.com/artifact/com.google.j2objc/j2objc-annotations/1.1, Apache 2.0
libfb303 0.9.3: https://mvnrepository.com/artifact/org.apache.thrift/libfb303/0.9.3, Apache 2.0
libthrift 0.9.3: https://mvnrepository.com/artifact/org.apache.thrift/libthrift/0.9.3, Apache 2.0
log4j-api 2.11.2: https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-api/2.11.2, Apache 2.0
log4j-core-2.11.2: https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-core/2.11.2, Apache 2.0
log4j 1.2.17: https://mvnrepository.com/artifact/log4j/log4j/1.2.17, Apache 2.0
log4j-1.2-api 2.14.1: https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-1.2-api/2.14.1, Apache 2.0
lz4 1.3.0: https://mvnrepository.com/artifact/net.jpountz.lz4/lz4/1.3.0, Apache 2.0
mapstruct 1.2.0.Final: https://github.com/mapstruct/mapstruct, Apache 2.0
mybatis 3.5.2 https://mvnrepository.com/artifact/org.mybatis/mybatis/3.5.2, Apache 2.0
mybatis-plus 3.2.0: https://github.com/baomidou/mybatis-plus, Apache 2.0
mybatis-plus-annotation 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-annotation/3.2.0, Apache 2.0
mybatis-plus-boot-starter 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-boot-starter/3.2.0, Apache 2.0
mybatis-plus-core 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-core/3.2.0, Apache 2.0
mybatis-plus-extension 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-extension/3.2.0, Apache 2.0
mybatis-spring 2.0.2: https://mvnrepository.com/artifact/org.mybatis/mybatis-spring/2.0.2, Apache 2.0
netty 3.6.2.Final: https://github.com/netty/netty, Apache 2.0
netty 4.1.53.Final: https://github.com/netty/netty/blob/netty-4.1.53.Final/LICENSE.txt, Apache 2.0
opencsv 2.3: https://mvnrepository.com/artifact/net.sf.opencsv/opencsv/2.3, Apache 2.0
parquet-hadoop-bundle 1.8.1: https://mvnrepository.com/artifact/org.apache.parquet/parquet-hadoop-bundle/1.8.1, Apache 2.0
poi 4.1.2: https://mvnrepository.com/artifact/org.apache.poi/poi/4.1.2, Apache 2.0
poi-ooxml 4.1.2: https://mvnrepository.com/artifact/org.apache.poi/poi-ooxml/4.1.2, Apache 2.0
poi-ooxml-schemas-4.1.2: https://mvnrepository.com/artifact/org.apache.poi/poi-ooxml-schemas/4.1.2, Apache 2.0
quartz 2.3.2: https://mvnrepository.com/artifact/org.quartz-scheduler/quartz/2.3.2, Apache 2.0
quartz-jobs 2.3.2: https://mvnrepository.com/artifact/org.quartz-scheduler/quartz-jobs/2.3.2, Apache 2.0
snakeyaml 1.28: https://mvnrepository.com/artifact/org.yaml/snakeyaml/1.28, Apache 2.0
snappy 0.2: https://mvnrepository.com/artifact/org.iq80.snappy/snappy/0.2, Apache 2.0
snappy-java 1.0.4.1: https://github.com/xerial/snappy-java, Apache 2.0
SparseBitSet 1.2: https://mvnrepository.com/artifact/com.zaxxer/SparseBitSet, Apache 2.0
spring-aop 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-aop/5.3.12, Apache 2.0
spring-beans 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-beans/5.3.12, Apache 2.0
spring-boot 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot/2.5.6, Apache 2.0
spring-boot-actuator 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-actuator/2.5.6, Apache 2.0
spring-boot-actuator-autoconfigure 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-actuator-autoconfigure/2.5.6, Apache 2.0
spring-boot-configuration-processor 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-configuration-processor/2.5.6, Apache 2.0
spring-boot-autoconfigure 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-autoconfigure/2.5.6, Apache 2.0
spring-boot-starter 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter/2.5.6, Apache 2.0
spring-boot-starter-actuator 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-actuator/2.5.6, Apache 2.0
spring-boot-starter-aop 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-aop/2.5.6, Apache 2.0
spring-boot-starter-jdbc 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-jdbc/2.5.6, Apache 2.0
spring-boot-starter-jetty 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-jetty/2.5.6, Apache 2.0
spring-boot-starter-json 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-json/2.5.6, Apache 2.0
spring-boot-starter-logging 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-logging/2.5.6, Apache 2.0
spring-boot-starter-quartz 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-quartz/2.5.6, Apache 2.0
spring-boot-starter-web 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-web/2.5.6, Apache 2.0
spring-boot-starter-cache 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-cache/2.5.6, Apache 2.0
spring-context 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-context/5.3.12, Apache 2.0
spring-context-support 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-context-support/5.3.12, Apache 2.0
spring-core 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-core, Apache 2.0
spring-expression 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-expression, Apache 2.0
springfox-core 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-core, Apache 2.0
springfox-schema 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-schema, Apache 2.0
springfox-spi 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-spi, Apache 2.0
springfox-spring-web 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-spring-web/2.9.2, Apache 2.0
springfox-swagger2 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-swagger2/2.9.2, Apache 2.0
springfox-swagger-common 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-swagger-common/2.9.2, Apache 2.0
springfox-swagger-ui 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-swagger-ui/2.9.2, Apache 2.0
spring-jcl 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-jcl/5.3.12, Apache 2.0
spring-jdbc 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-jdbc/5.3.12, Apache 2.0
spring-plugin-core 1.2.0.RELEASE: https://mvnrepository.com/artifact/org.springframework.plugin/spring-plugin-core/1.2.0.RELEASE, Apache 2.0
spring-plugin-metadata 1.2.0.RELEASE: https://mvnrepository.com/artifact/org.springframework.plugin/spring-plugin-metadata/1.2.0.RELEASE, Apache 2.0
spring-tx 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-tx/5.3.12, Apache 2.0
spring-web 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-web/5.3.12, Apache 2.0
spring-webmvc 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-webmvc/5.3.12, Apache 2.0
swagger-annotations 1.5.20: https://mvnrepository.com/artifact/io.swagger/swagger-annotations/1.5.20, Apache 2.0
swagger-bootstrap-ui 1.9.3: https://mvnrepository.com/artifact/com.github.xiaoymin/swagger-bootstrap-ui/1.9.3, Apache 2.0
swagger-models 1.5.24: https://mvnrepository.com/artifact/io.swagger/swagger-models/1.5.24, Apache 2.0
tephra-api 0.6.0: https://mvnrepository.com/artifact/co.cask.tephra/tephra-api/0.6.0, Apache 2.0
tomcat-embed-el 9.0.54: https://mvnrepository.com/artifact/org.apache.tomcat.embed/tomcat-embed-el/9.0.54, Apache 2.0
xercesImpl 2.9.1: https://mvnrepository.com/artifact/xerces/xercesImpl/2.9.1, Apache 2.0
xmlbeans 3.1.0: https://mvnrepository.com/artifact/org.apache.xmlbeans/xmlbeans/3.1.0, Apache 2.0
xml-apis 1.3.04: https://mvnrepository.com/artifact/xml-apis/xml-apis/1.3.04, Apache 2.0 and W3C
zookeeper 3.4.14: https://mvnrepository.com/artifact/org.apache.zookeeper/zookeeper/3.4.14, Apache 2.0
presto-jdbc 0.238.1 https://mvnrepository.com/artifact/com.facebook.presto/presto-jdbc/0.238.1
protostuff-core 1.7.2: https://github.com/protostuff/protostuff/protostuff-core Apache-2.0
protostuff-runtime 1.7.2: https://github.com/protostuff/protostuff/protostuff-core Apache-2.0
protostuff-api 1.7.2: https://github.com/protostuff/protostuff/protostuff-api Apache-2.0
protostuff-collectionschema 1.7.2: https://github.com/protostuff/protostuff/protostuff-collectionschema Apache-2.0
prometheus client_java(simpleclient) 0.12.0: https://github.com/prometheus/client_java, Apache 2.0
snowflake snowflake-2010: https://github.com/twitter-archive/snowflake/tree/snowflake-2010, Apache 2.0
kubernetes-client 5.8.0: https://mvnrepository.com/artifact/io.fabric8/kubernetes-client/5.8.0, Apache 2.0
kubernetes-model-admissionregistration 5.8.0: https://mvnrepository.com/artifact/io.fabric8/kubernetes-model-admissionregistration/5.8.0, Apache 2.0
kubernetes-model-apiextensions 5.8.0: https://mvnrepository.com/artifact/io.fabric8/kubernetes-model-apiextensions/5.8.0, Apache 2.0
kubernetes-model-apps 5.8.0: https://mvnrepository.com/artifact/io.fabric8/kubernetes-model-apps/5.8.0, Apache 2.0
kubernetes-model-autoscaling 5.8.0: https://mvnrepository.com/artifact/io.fabric8/kubernetes-model-autoscaling/5.8.0, Apache 2.0
kubernetes-model-batch 5.8.0: https://mvnrepository.com/artifact/io.fabric8/kubernetes-model-autoscaling/5.8.0, Apache 2.0
kubernetes-model-certificates 5.8.0: https://mvnrepository.com/artifact/io.fabric8/kubernetes-model-certificates/5.8.0, Apache 2.0
kubernetes-model-common 5.8.0: https://mvnrepository.com/artifact/io.fabric8/kubernetes-model-common/5.8.0, Apache 2.0
kubernetes-model-coordination 5.8.0: https://mvnrepository.com/artifact/io.fabric8/kubernetes-model-coordination/5.8.0, Apache 2.0
kubernetes-model-core 5.8.0: https://mvnrepository.com/artifact/io.fabric8/kubernetes-model-core/5.8.0, Apache 2.0
kubernetes-model-discovery 5.8.0: https://mvnrepository.com/artifact/io.fabric8/kubernetes-model-discovery/5.8.0, Apache 2.0
kubernetes-model-events 5.8.0: https://mvnrepository.com/artifact/io.fabric8/kubernetes-model-events/5.8.0, Apache 2.0
kubernetes-model-extensions 5.8.0: https://mvnrepository.com/artifact/io.fabric8/kubernetes-model-extensions/5.8.0, Apache 2.0
kubernetes-model-flowcontrol 5.8.0: https://mvnrepository.com/artifact/io.fabric8/kubernetes-model-flowcontrol/5.8.0, Apache 2.0
kubernetes-model-metrics 5.8.0: https://mvnrepository.com/artifact/io.fabric8/kubernetes-model-metrics/5.8.0, Apache 2.0
kubernetes-model-networking 5.8.0: https://mvnrepository.com/artifact/io.fabric8/kubernetes-model-networking/5.8.0, Apache 2.0
kubernetes-model-node 5.8.0: https://mvnrepository.com/artifact/io.fabric8/kubernetes-model-node/5.8.0, Apache 2.0
kubernetes-model-policy 5.8.0: https://mvnrepository.com/artifact/io.fabric8/kubernetes-model-policy/5.8.0, Apache 2.0
kubernetes-model-rbac 5.8.0: https://mvnrepository.com/artifact/io.fabric8/kubernetes-model-rbac/5.8.0, Apache 2.0
kubernetes-model-scheduling 5.8.0: https://mvnrepository.com/artifact/io.fabric8/kubernetes-model-scheduling/5.8.0, Apache 2.0
kubernetes-model-storageclass 5.8.0: https://mvnrepository.com/artifact/io.fabric8/kubernetes-model-storageclass/5.8.0, Apache 2.0
zjsonpatch 0.3.0 https://mvnrepository.com/artifact/io.fabric8/zjsonpatch/0.3.0, Apache 2.0
generex 1.0.2 https://mvnrepository.com/artifact/com.github.mifmif/generex/1.0.2, Apache 2.0
jackson-dataformat-yaml 2.12.5 https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-yaml/2.12.5, Apache 2.0
logging-interceptor 3.14.9 https://mvnrepository.com/artifact/com.squareup.okhttp3/logging-interceptor/3.14.9, Apache 2.0
okhttp 3.14.3 https://mvnrepository.com/artifact/com.squareup.okhttp3/okhttp/3.14.3, Apache 2.0
okio 1.17.2 https://mvnrepository.com/artifact/com.squareup.okio/okio/1.17.2, Apache 2.0
hibernate-validator 6.2.2.Final https://mvnrepository.com/artifact/org.hibernate.validator/hibernate-validator/6.2.2.Final, Apache 2.0
jakarta.validation-api 2.0.2 https://mvnrepository.com/artifact/jakarta.validation/jakarta.validation-api/2.0.2, Apache 2.0
jboss-logging:jar 3.4.2.Final https://mvnrepository.com/artifact/org.jboss.logging/jboss-logging/3.4.2.Final, Apache 2.0
ion-java 1.0.2 https://mvnrepository.com/artifact/software.amazon.ion/ion-java/1.0.2 Apache 2.0
jmespath-java 1.12.160 https://mvnrepository.com/artifact/com.amazonaws/jmespath-java/1.12.160 Apache 2.0
jackson-dataformat-cbor 2.12.5 https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-cbor/2.12.5 Apache 2.0
aws-java-sdk-emr 1.12.160 https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk-emr/1.12.160 Apache 2.0
aws-java-sdk-core 1.12.160 https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk-core/1.12.160 Apache 2.0
========================================================================
BSD licenses
========================================================================
The following components are provided under a BSD license. See project link for details.
The text of each license is also included at licenses/LICENSE-[project].txt.
asm 3.1: https://github.com/jdf/javalin/tree/master/lib/asm-3.1, BSD
curvesapi 1.06: https://mvnrepository.com/artifact/com.github.virtuald/curvesapi/1.06, BSD 3-clause
javolution 5.5.1: https://mvnrepository.com/artifact/javolution/javolution/5.5.1, BSD
jline 0.9.94: https://github.com/jline/jline3, BSD
jsch 0.1.42: https://mvnrepository.com/artifact/com.jcraft/jsch/0.1.42, BSD
leveldbjni-all 1.8: https://github.com/fusesource/leveldbjni, BSD-3-Clause
postgresql 42.2.5: https://mvnrepository.com/artifact/org.postgresql/postgresql/42.2.5, BSD 2-clause
protobuf-java 2.5.0: https://mvnrepository.com/artifact/com.google.protobuf/protobuf-java/2.5.0, BSD 2-clause
paranamer 2.3: https://mvnrepository.com/artifact/com.thoughtworks.paranamer/paranamer/2.3, BSD
threetenbp 1.3.6: https://mvnrepository.com/artifact/org.threeten/threetenbp/1.3.6, BSD 3-clause
xmlenc 0.52: https://mvnrepository.com/artifact/xmlenc/xmlenc/0.52, BSD
py4j 0.10.9: https://mvnrepository.com/artifact/net.sf.py4j/py4j/0.10.9, BSD 2-clause
LatencyUtils 2.0.3: https://github.com/LatencyUtils/LatencyUtils, BSD-2-Clause
janino 3.1.6: https://mvnrepository.com/artifact/org.codehaus.janino/janino/3.1.6, BSD 3-clause
commons-compiler 3.1.6: https://mvnrepository.com/artifact/org.codehaus.janino/janino/3.1.6, BSD 3-clause
automaton 1.11-8 https://mvnrepository.com/artifact/dk.brics.automaton/automaton/1.11-8, BSD 2-clause
========================================================================
CDDL licenses
========================================================================
The following components are provided under the CDDL License. See project link for details.
The text of each license is also included at licenses/LICENSE-[project].txt.
activation 1.1: https://mvnrepository.com/artifact/javax.activation/activation/1.1 CDDL 1.0
javax.activation-api 1.2.0: https://mvnrepository.com/artifact/javax.activation/javax.activation-api/1.2.0, CDDL and LGPL 2.0
javax.annotation-api 1.3.2: https://mvnrepository.com/artifact/javax.annotation/javax.annotation-api/1.3.2, CDDL + GPLv2
javax.mail 1.6.2: https://mvnrepository.com/artifact/com.sun.mail/javax.mail/1.6.2, CDDL/GPLv2
javax.servlet-api 3.1.0: https://mvnrepository.com/artifact/javax.servlet/javax.servlet-api/3.1.0, CDDL + GPLv2
jaxb-api 2.3.1: https://mvnrepository.com/artifact/javax.xml.bind/jaxb-api/2.3.1, CDDL 1.1
jaxb-impl 2.2.3-1: https://mvnrepository.com/artifact/com.sun.xml.bind/jaxb-impl/2.2.3-1, CDDL and GPL 1.1
jersey-client 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-client/1.9, CDDL 1.1 and GPL 1.1
jersey-core 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-core/1.9, CDDL 1.1 and GPL 1.1
jersey-guice 1.9: https://mvnrepository.com/artifact/com.sun.jersey.contribs/jersey-guice/1.9, CDDL 1.1 and GPL 1.1
jersey-json 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-json/1.9, CDDL 1.1 and GPL 1.1
jersey-server 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-server/1.9, CDDL 1.1 and GPL 1.1
jta 1.1: https://mvnrepository.com/artifact/javax.transaction/jta/1.1, CDDL 1.0
transaction-api 1.1: https://mvnrepository.com/artifact/javax.transaction/transaction-api/1.1, CDDL 1.0
javax.el 3.0.0: https://mvnrepository.com/artifact/org.glassfish/javax.el/3.0.0, CDDL and GPL and GPL 2.0
========================================================================
EPL licenses
========================================================================
The following components are provided under the EPL License. See project link for details.
The text of each license is also included at licenses/LICENSE-[project].txt.
aspectjweaver 1.9.7:https://mvnrepository.com/artifact/org.aspectj/aspectjweaver/1.9.7, EPL 1.0
logback-classic 1.2.3: https://mvnrepository.com/artifact/ch.qos.logback/logback-classic/1.2.3, EPL 1.0 and LGPL 2.1
logback-core 1.2.3: https://mvnrepository.com/artifact/ch.qos.logback/logback-core/1.2.3, EPL 1.0 and LGPL 2.1
h2-1.4.200 https://github.com/h2database/h2database/blob/master/LICENSE.txt, MPL 2.0 or EPL 1.0
========================================================================
MIT licenses
========================================================================
The following components are provided under a MIT 2.0 license. See project link for details.
The text of each license is also included at licenses/LICENSE-[project].txt.
jul-to-slf4j 1.7.32: https://mvnrepository.com/artifact/org.slf4j/jul-to-slf4j/1.7.32, MIT
mssql-jdbc 6.1.0.jre8: https://mvnrepository.com/artifact/com.microsoft.sqlserver/mssql-jdbc/6.1.0.jre8, MIT
slf4j-api 1.7.5: https://mvnrepository.com/artifact/org.slf4j/slf4j-api/1.7.5, MIT
animal-sniffer-annotations 1.14 https://mvnrepository.com/artifact/org.codehaus.mojo/animal-sniffer-annotations/1.14, MIT
checker-compat-qual 2.0.0 https://mvnrepository.com/artifact/org.checkerframework/checker-compat-qual/2.0.0, MIT + GPLv2
checker-qual 3.10.0 https://mvnrepository.com/artifact/org.checkerframework/checker-qual/3.10.0, MIT + GPLv2
Java-WebSocket 1.5.1: https://github.com/TooTallNate/Java-WebSocket MIT
oshi-core 6.1.1: https://mvnrepository.com/artifact/com.github.oshi/oshi-core/6.1.1, MIT
========================================================================
MPL 1.1 licenses
========================================================================
The following components are provided under a MPL 1.1 license. See project link for details.
The text of each license is also included at licenses/LICENSE-[project].txt.
jamon-runtime 2.3.1: https://mvnrepository.com/artifact/org.jamon/jamon-runtime/2.3.1, MPL-1.1
javassist 3.27.0-GA: https://github.com/jboss-javassist/javassist, MPL-1.1
========================================================================
Public Domain licenses
========================================================================
aopalliance 1.0: https://mvnrepository.com/artifact/aopalliance/aopalliance/1.0, Public Domain
========================================
WTFPL License
========================================
reflections 0.9.12: https://github.com/ronmamo/reflections WTFPL
========================================
CC0-1.0 licenses
========================================
HdrHistogram 2.1.12: https://github.com/HdrHistogram/HdrHistogram , CC0-1.0 and BSD 2-Clause
========================================================================
UI related licenses
========================================================================
The following components are used in UI.See project link for details.
The text of each license is also included at licenses/ui-licenses/LICENSE-[project].txt.
========================================
MIT licenses
========================================
@form-create/element-ui 1.0.18: https://github.com/xaboy/form-create MIT
axios 0.16.2: https://github.com/axios/axios MIT
bootstrap 3.3.7: https://github.com/twbs/bootstrap MIT
canvg 1.5.1: https://github.com/canvg/canvg MIT
clipboard 2.0.1: https://github.com/zenorocha/clipboard.js MIT
codemirror 5.43.0: https://github.com/codemirror/CodeMirror MIT
dayjs 1.7.8: https://github.com/iamkun/dayjs MIT
element-ui 2.13.2: https://github.com/ElemeFE/element MIT
html2canvas 0.5.0-beta4: https://github.com/niklasvh/html2canvas MIT
jquery 3.3.1: https://github.com/jquery/jquery MIT
jquery-ui 1.12.1: https://github.com/jquery/jquery-ui MIT
js-cookie 2.2.1: https://github.com/js-cookie/js-cookie MIT
jsplumb 2.8.6: https://github.com/jsplumb/jsplumb MIT and GPLv2
lodash 4.17.11: https://github.com/lodash/lodash MIT
moment-timezone 0.5.33: https://github.com/moment/moment-timezone MIT
vue-treeselect 0.4.0: https://github.com/riophae/vue-treeselect MIT
vue 2.5.17: https://github.com/vuejs/vue MIT
vue-router 2.7.0: https://github.com/vuejs/vue-router MIT
vuex 3.0.0: https://github.com/vuejs/vuex MIT
vuex-router-sync 4.1.2: https://github.com/vuejs/vuex-router-sync MIT
dagre 0.8.5: https://github.com/dagrejs/dagre MIT
js-cookie 3.0.1 https://github.com/js-cookie/js-cookie MIT
@types/js-cookie 3.0.1 https://github.com/DefinitelyTyped/DefinitelyTyped MIT
========================================
Apache 2.0 licenses
========================================
echarts 4.1.0: https://github.com/apache/incubator-echarts Apache-2.0
remixicon 2.5.0 https://github.com/Remix-Design/remixicon Apache-2.0
========================================
BSD licenses
========================================
d3 3.5.17: https://github.com/d3/d3 BSD-3-Clause
========================================================================
UI-Next related licenses
========================================================================
The following components are used in UI-Next.See project link for details.
The text of each license is also included at licenses/ui-next-licenses/LICENSE-[project].txt.
========================================
MIT licenses
========================================
@antv/layout 0.1.31: https://github.com/antvis/layout MIT
@antv/x6 1.30.1: https://github.com/antvis/x6 MIT
@vueuse/core 7.6.2: https://github.com/vueuse/vueuse MIT
axios 0.24.0: https://github.com/axios/axios MIT
date-fns 2.28.0: https://github.com/date-fns/date-fns MIT
lodash 4.17.21: https://github.com/lodash/lodash MIT
monaco-editor 0.31.1: https://github.com/microsoft/monaco-editor MIT
naive-ui 2.26.0: https://github.com/TuSimple/naive-ui MIT
nprogress 0.2.0: https://github.com/rstacruz/nprogress MIT
pinia 2.0.11: https://github.com/vuejs/pinia MIT
pinia-plugin-persistedstate 1.2.2: https://github.com/prazdevs/pinia-plugin-persistedstate MIT
vfonts 0.1.0: https://www.npmjs.com/package/vfonts MIT
vue 3.2.31: https://github.com/vuejs/core MIT
vue-i18n 9.2.0-beta.30: https://github.com/kazupon/vue-i18n MIT
vue-router 4.0.12: https://github.com/vuejs/router MIT
@types/node 16.11.25: https://github.com/DefinitelyTyped/DefinitelyTyped MIT
@types/nprogress 0.2.0: https://github.com/DefinitelyTyped/DefinitelyTyped MIT
@types/qs 6.9.7: https://github.com/DefinitelyTyped/DefinitelyTyped MIT
@vicons/antd 0.11.0: https://www.npmjs.com/package/@vicons/antd MIT
@vitejs/plugin-vue 1.10.2: https://github.com/vitejs/vite MIT
@vitejs/plugin-vue-jsx 1.3.7: https://github.com/vitejs/vite MIT
dart-sass 1.25.0: https://github.com/sass/dart-sass MIT
eslint 8.9.0: https://github.com/eslint/eslint MIT
eslint-config-prettier 8.4.0: https://github.com/prettier/eslint-config-prettier MIT
eslint-plugin-prettier 4.0.0: https://github.com/prettier/eslint-plugin-prettier MIT
eslint-plugin-vue 8.5.0: https://github.com/vuejs/eslint-plugin-vue MIT
prettier 2.5.1: https://github.com/prettier/prettier MIT
sass 1.49.8: https://github.com/sass/dart-sass MIT
sass-loader 12.6.0: https://github.com/webpack-contrib/sass-loader MIT
typescript-plugin-css-modules 3.4.0: https://github.com/mrmckeb/typescript-plugin-css-modules MIT
vite 2.8.4: https://github.com/vitejs/vite MIT
vite-plugin-compression 0.3.6: https://github.com/vbenjs/vite-plugin-compression MIT
vue-tsc 0.28.10: https://github.com/johnsoncodehk/volar MIT
========================================
Apache 2.0 licenses
========================================
echarts 5.3.0: https://github.com/apache/echarts Apache-2.0
typescript 4.5.5: https://github.com/Microsoft/TypeScript Apache-2.0
open-sans: Apache-2.0
roboto: Apache-2.0
========================================================================
BSD licenses
========================================================================
The following components are provided under a BSD license. See project link for details.
The text of each license is also included at licenses/ui-next-licenses/LICENSE-[project].txt.
qs 6.10.3: https://github.com/ljharb/qs BSD 3-Clause
@typescript-eslint/eslint-plugin 5.12.1: https://github.com/typescript-eslint/typescript-eslint BSD 2-clause
@typescript-eslint/parser 5.12.1: https://github.com/typescript-eslint/typescript-eslint BSD 2-clause
========================================
SIL licenses
========================================
fira-code: https://github.com/tonsky/FiraCode SIL
fira-sans: SIL
ibm-plex: SIL
inter: https://github.com/rsms/inter SIL
lato: SIL
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,675 | [Feature][Python] Add LICENSE and NOTICE to Python API | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add LICENSE and NOTICE to python API
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8675 | https://github.com/apache/dolphinscheduler/pull/8768 | 27c5944c2d7111a731eb42d57097ba4a9def2fe9 | 62e12c73dc28dd0b671bf09db33bf9a4f8aedbe3 | "2022-03-03T05:39:03Z" | java | "2022-03-09T11:50:43Z" | dolphinscheduler-dist/release-docs/licenses/python-api-licenses/LICENSE-click.txt | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,675 | [Feature][Python] Add LICENSE and NOTICE to Python API | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add LICENSE and NOTICE to python API
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8675 | https://github.com/apache/dolphinscheduler/pull/8768 | 27c5944c2d7111a731eb42d57097ba4a9def2fe9 | 62e12c73dc28dd0b671bf09db33bf9a4f8aedbe3 | "2022-03-03T05:39:03Z" | java | "2022-03-09T11:50:43Z" | dolphinscheduler-dist/release-docs/licenses/python-api-licenses/LICENSE-py4j.txt | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,675 | [Feature][Python] Add LICENSE and NOTICE to Python API | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add LICENSE and NOTICE to python API
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8675 | https://github.com/apache/dolphinscheduler/pull/8768 | 27c5944c2d7111a731eb42d57097ba4a9def2fe9 | 62e12c73dc28dd0b671bf09db33bf9a4f8aedbe3 | "2022-03-03T05:39:03Z" | java | "2022-03-09T11:50:43Z" | dolphinscheduler-dist/release-docs/licenses/python-api-licenses/LICENSE-ruamel.yaml.txt | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,675 | [Feature][Python] Add LICENSE and NOTICE to Python API | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add LICENSE and NOTICE to python API
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8675 | https://github.com/apache/dolphinscheduler/pull/8768 | 27c5944c2d7111a731eb42d57097ba4a9def2fe9 | 62e12c73dc28dd0b671bf09db33bf9a4f8aedbe3 | "2022-03-03T05:39:03Z" | java | "2022-03-09T11:50:43Z" | dolphinscheduler-python/pydolphinscheduler/DEVELOP.md | <!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
# Develop
pydolphinscheduler is python API for Apache DolphinScheduler, it just defines what workflow look like instead of
store or execute it. We here use [py4j][py4j] to dynamically access Java Virtual Machine.
## Setup Develop Environment
**PyDolphinScheduler** use GitHub to hold all source code, you should clone the code before you do same change.
```shell
git clone git@github.com:apache/dolphinscheduler.git
```
Now, we should install all dependence to make sure we could run test or check code style locally
```shell
cd dolphinscheduler/dolphinscheduler-python/pydolphinscheduler
pip install .[dev]
```
Next, we have to open pydolphinscheduler project in you editor. We recommend you use [pycharm][pycharm]
instead of [IntelliJ IDEA][idea] to open it. And you could just open directory
`dolphinscheduler-python/pydolphinscheduler` instead of `dolphinscheduler-python`.
## Brief Concept
Apache DolphinScheduler is designed to define workflows through the UI, and pydolphinscheduler tries to define them by code. When
defining by code, users usually do not care whether the user, tenant, or queue exists. All a user cares about is creating
a new workflow from his/her code definition. So we have some **side objects** in the `pydolphinscheduler/side`
directory; they only check whether an object exists, and create it if it does not.
### Process Definition
pydolphinscheduler workflow object name, process definition is also same name as Java object(maybe would be change to
other word for more simple).
### Tasks
pydolphinscheduler tasks object, we use tasks to define exact job we want DolphinScheduler do for us. For now,
we only support `shell` task to execute shell task. [This link][all-task] list all tasks support in DolphinScheduler
and would be implemented in the further.
## Code Style
We use [isort][isort] to automatically keep Python imports alphabetically, and use [Black][black] for code
formatter and [Flake8][flake8] for pep8 checker. If you use [pycharm][pycharm]or [IntelliJ IDEA][idea],
maybe you could follow [Black-integration][black-editor] to configure them in your environment.
Our Python API CI would automatically run code style checker and unittest when you submit pull request in
GitHub, you could also run static check locally.
```shell
# We recommend you run isort and Black before Flake8, because Black could auto fix some code style issue
# but Flake8 just hint when code style not match pep8
# Run Isort
isort .
# Run Black
black .
# Run Flake8
flake8
```
## Testing
pydolphinscheduler using [pytest][pytest] to test our codebase. GitHub Action will run our test when you create
pull request or commit to dev branch, with python version `3.6|3.7|3.8|3.9` and operating system `linux|macOS|windows`.
To test locally, you could directly run pytest after set `PYTHONPATH`
```shell
PYTHONPATH=src/ pytest
```
We try to keep pydolphinscheduler usable through unit test coverage. 90% test coverage is our target, but for
now, we require test coverage of at least 85%, and each pull request with coverage less than 85% will fail our CI step
`Tests coverage`. We use [coverage][coverage] to check our test coverage, and you could check it locally by
run command.
```shell
coverage run && coverage report
```
It not only runs the unit tests but also shows the coverage of each file whose coverage rate is less than 100%, and the `TOTAL`
line shows the total coverage of your code. If your CI failed on the coverage step, you can find the reason in
this command's output.
<!-- content -->
[py4j]: https://www.py4j.org/index.html
[pycharm]: https://www.jetbrains.com/pycharm
[idea]: https://www.jetbrains.com/idea/
[all-task]: https://dolphinscheduler.apache.org/en-us/docs/dev/user_doc/guide/task/shell.html
[pytest]: https://docs.pytest.org/en/latest/
[black]: https://black.readthedocs.io/en/stable/index.html
[flake8]: https://flake8.pycqa.org/en/latest/index.html
[black-editor]: https://black.readthedocs.io/en/stable/integrations/editors.html#pycharm-intellij-idea
[coverage]: https://coverage.readthedocs.io/en/stable/
[isort]: https://pycqa.github.io/isort/index.html
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,675 | [Feature][Python] Add LICENSE and NOTICE to Python API | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add LICENSE and NOTICE to python API
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8675 | https://github.com/apache/dolphinscheduler/pull/8768 | 27c5944c2d7111a731eb42d57097ba4a9def2fe9 | 62e12c73dc28dd0b671bf09db33bf9a4f8aedbe3 | "2022-03-03T05:39:03Z" | java | "2022-03-09T11:50:43Z" | dolphinscheduler-python/pydolphinscheduler/LICENSE | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,675 | [Feature][Python] Add LICENSE and NOTICE to Python API | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Add LICENSE and NOTICE to python API
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8675 | https://github.com/apache/dolphinscheduler/pull/8768 | 27c5944c2d7111a731eb42d57097ba4a9def2fe9 | 62e12c73dc28dd0b671bf09db33bf9a4f8aedbe3 | "2022-03-03T05:39:03Z" | java | "2022-03-09T11:50:43Z" | dolphinscheduler-python/pydolphinscheduler/NOTICE | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,771 | [Bug][UI Next][V1.0.0-Alpha] Workflow Duplicate key error | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
<img width="1909" alt="image" src="https://user-images.githubusercontent.com/8847400/157403000-5dc5ab06-1ae5-4bf5-9a3e-5655b2530c94.png">
### What you expected to happen
Workflow update success.
### How to reproduce
Create a workflow and update it twice in a row.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8771 | https://github.com/apache/dolphinscheduler/pull/8782 | 6a02b15e40d04e98891276b0e4f78312de602436 | 5ca886fdd1532328577e02ec3a5db38ac6268442 | "2022-03-09T08:35:07Z" | java | "2022-03-09T14:16:33Z" | dolphinscheduler-ui-next/src/views/projects/workflow/components/dag/use-task-edit.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { ref, onMounted, watch } from 'vue'
import type { Ref } from 'vue'
import type { Graph } from '@antv/x6'
import type { Coordinate, NodeData } from './types'
import { TaskType } from '@/views/projects/task/constants/task-type'
import { formatParams } from '@/views/projects/task/components/node/format-data'
import { useCellUpdate } from './dag-hooks'
import { WorkflowDefinition } from './types'
interface Options {
  // The X6 graph instance; may be undefined (presumably before the canvas
  // is mounted — TODO confirm against the caller).
  graph: Ref<Graph | undefined>
  // The workflow definition being edited; may be undefined (e.g. when
  // creating a brand-new workflow).
  definition: Ref<WorkflowDefinition | undefined>
}
/**
* Edit task configuration when dbclick
* @param {Options} options
* @returns
*/
export function useTaskEdit(options: Options) {
const { graph, definition } = options
const { addNode, setNodeName } = useCellUpdate({ graph })
const taskDefinitions = ref<NodeData[]>(
definition.value?.taskDefinitionList || []
)
const currTask = ref<NodeData>({
taskType: 'SHELL',
code: 0,
name: ''
})
const taskModalVisible = ref(false)
/**
* Append a new task
*/
function appendTask(code: number, type: TaskType, coordinate: Coordinate) {
addNode(code + '', type, '', 'YES', coordinate)
taskDefinitions.value.push({
code,
taskType: type,
name: ''
})
openTaskModal({ code, taskType: type, name: '' })
}
/**
* Copy a task
*/
function copyTask(
name: string,
code: number,
targetCode: number,
type: TaskType,
flag: string,
coordinate: Coordinate
) {
addNode(code + '', type, name, flag, coordinate)
const definition = taskDefinitions.value.find((t) => t.code === targetCode)
const newDefinition = {
...definition,
code,
name
} as NodeData
taskDefinitions.value.push(newDefinition)
}
/**
* Remove task
* @param {number} code
*/
function removeTasks(codes: number[]) {
taskDefinitions.value = taskDefinitions.value.filter(
(task) => !codes.includes(task.code)
)
}
function openTaskModal(task: NodeData) {
currTask.value = task
taskModalVisible.value = true
}
/**
* Edit task
* @param {number} code
*/
function editTask(code: number) {
const definition = taskDefinitions.value.find((t) => t.code === code)
if (definition) {
currTask.value = definition
}
taskModalVisible.value = true
}
/**
* The confirm event in task config modal
* @param formRef
* @param from
*/
function taskConfirm({ data }: any) {
const taskDef = formatParams(data).taskDefinitionJsonObj as NodeData
// override target config
taskDefinitions.value = taskDefinitions.value.map((task) => {
if (task.code === currTask.value?.code) {
setNodeName(task.code + '', taskDef.name)
return {
...taskDef,
code: task.code,
taskType: currTask.value.taskType
}
}
return task
})
taskModalVisible.value = false
}
/**
* The cancel event in task config modal
*/
function taskCancel() {
taskModalVisible.value = false
}
onMounted(() => {
if (graph.value) {
graph.value.on('cell:dblclick', ({ cell }) => {
const code = Number(cell.id)
editTask(code)
})
}
})
watch(definition, () => {
taskDefinitions.value = definition.value?.taskDefinitionList || []
})
return {
currTask,
taskModalVisible,
taskConfirm,
taskCancel,
appendTask,
editTask,
copyTask,
taskDefinitions,
removeTasks
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,740 | [Bug][UI Next][V1.0.0-Alpha] Missing pagination in version info table. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![1646645939](https://user-images.githubusercontent.com/19239641/157006133-12acc88e-8e9e-4457-b1d4-6b34cc70b3b7.jpg)
![image](https://user-images.githubusercontent.com/19239641/157006184-4ef57764-528c-48e5-888f-5ed9942c4ca2.png)
### What you expected to happen
Missing pagination in version info table.
### How to reproduce
Supplemental pagination.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8740 | https://github.com/apache/dolphinscheduler/pull/8785 | 5ca886fdd1532328577e02ec3a5db38ac6268442 | 97a0eede25d27c8b603099c7c1592c70e468c6c0 | "2022-03-07T09:41:09Z" | java | "2022-03-09T14:31:38Z" | dolphinscheduler-ui-next/src/views/projects/workflow/definition/components/use-table.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { h, ref, reactive, SetupContext } from 'vue'
import { useRouter } from 'vue-router'
import { useI18n } from 'vue-i18n'
import { NSpace, NTooltip, NButton, NPopconfirm, NTag } from 'naive-ui'
import {
deleteVersion,
queryVersions,
switchVersion
} from '@/service/modules/process-definition'
import type { Router } from 'vue-router'
import type { TableColumns } from 'naive-ui/es/data-table/src/interface'
import { DeleteOutlined, ExclamationCircleOutlined } from '@vicons/antd'
import styles from '../index.module.scss'
export function useTable(
ctx: SetupContext<('update:show' | 'update:row' | 'updateList')[]>
) {
const { t } = useI18n()
const router: Router = useRouter()
const columns: TableColumns<any> = [
{
title: '#',
key: 'id',
width: 50,
render: (_row, index) => index + 1
},
{
title: t('project.workflow.version'),
key: 'version',
render: (_row) => {
if (_row.version === variables.row.version) {
return h(
NTag,
{ type: 'success', size: 'small' },
{
default: () =>
`V${_row.version} ${t('project.workflow.current_version')}`
}
)
} else {
return `V${_row.version}`
}
}
},
{
title: t('project.workflow.description'),
key: 'description'
},
{
title: t('project.workflow.create_time'),
key: 'createTime'
},
{
title: t('project.workflow.operation'),
key: 'operation',
className: styles.operation,
render: (_row) => {
return h(NSpace, null, {
default: () => [
h(
NPopconfirm,
{
onPositiveClick: () => {
handleSwitchVersion(_row.version)
}
},
{
trigger: () =>
h(
NTooltip,
{},
{
trigger: () =>
h(
NButton,
{
circle: true,
type: 'info',
size: 'tiny',
disabled: _row.version === variables.row.version
},
{
icon: () => h(ExclamationCircleOutlined)
}
),
default: () => t('project.workflow.switch_version')
}
),
default: () => t('project.workflow.confirm_switch_version')
}
),
h(
NPopconfirm,
{
onPositiveClick: () => {
handleDeleteVersion(_row.version)
}
},
{
trigger: () =>
h(
NTooltip,
{},
{
trigger: () =>
h(
NButton,
{
circle: true,
type: 'error',
size: 'tiny',
disabled: _row.version === variables.row.version
},
{
icon: () => h(DeleteOutlined)
}
),
default: () => t('project.workflow.delete')
}
),
default: () => t('project.workflow.delete_confirm')
}
)
]
})
}
}
]
const variables = reactive({
columns,
row: {} as any,
tableData: [],
projectCode: ref(Number(router.currentRoute.value.params.projectCode))
})
const getTableData = (row: any) => {
variables.row = row
const params = {
pageSize: 10,
pageNo: 1
}
queryVersions(
{ ...params },
variables.projectCode,
variables.row.code
).then((res: any) => {
variables.tableData = res.totalList.map((item: any) => ({ ...item }))
})
}
const handleSwitchVersion = (version: number) => {
switchVersion(variables.projectCode, variables.row.code, version)
.then(() => {
window.$message.success(t('project.workflow.success'))
ctx.emit('updateList')
getTableData(variables.row)
})
.catch((error: any) => {
window.$message.error(error.message)
})
}
const handleDeleteVersion = (version: number) => {
deleteVersion(variables.projectCode, variables.row.code, version)
.then(() => {
window.$message.success(t('project.workflow.success'))
ctx.emit('updateList')
getTableData(variables.row)
})
.catch((error: any) => {
window.$message.error(error.message)
})
}
return {
variables,
getTableData
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,740 | [Bug][UI Next][V1.0.0-Alpha] Missing pagination in version info table. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![1646645939](https://user-images.githubusercontent.com/19239641/157006133-12acc88e-8e9e-4457-b1d4-6b34cc70b3b7.jpg)
![image](https://user-images.githubusercontent.com/19239641/157006184-4ef57764-528c-48e5-888f-5ed9942c4ca2.png)
### What you expected to happen
Missing pagination in version info table.
### How to reproduce
Supplemental pagination.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8740 | https://github.com/apache/dolphinscheduler/pull/8785 | 5ca886fdd1532328577e02ec3a5db38ac6268442 | 97a0eede25d27c8b603099c7c1592c70e468c6c0 | "2022-03-07T09:41:09Z" | java | "2022-03-09T14:31:38Z" | dolphinscheduler-ui-next/src/views/projects/workflow/definition/components/version-modal.tsx | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { defineComponent, PropType, toRefs, watch } from 'vue'
import { useI18n } from 'vue-i18n'
import { NDataTable } from 'naive-ui'
import Modal from '@/components/modal'
import { useForm } from './use-form'
import { useModal } from './use-modal'
import { useTable } from './use-table'
import { IDefinitionData } from '../types'
import styles from '../index.module.scss'
const props = {
show: {
type: Boolean as PropType<boolean>,
default: false
},
row: {
type: Object as PropType<IDefinitionData>,
default: {}
}
}
export default defineComponent({
name: 'workflowDefinitionVersion',
props,
emits: ['update:show', 'update:row', 'updateList'],
setup(props, ctx) {
const { variables, getTableData } = useTable(ctx)
const { importState } = useForm()
const { handleImportDefinition } = useModal(importState, ctx)
const hideModal = () => {
ctx.emit('update:show')
}
const handleImport = () => {
handleImportDefinition()
}
const customRequest = ({ file }: any) => {
importState.importForm.name = file.name
importState.importForm.file = file.file
}
watch(
() => props.show,
() => {
if (props.show && props.row?.code) {
getTableData(props.row)
}
}
)
return {
hideModal,
handleImport,
customRequest,
...toRefs(variables)
}
},
render() {
const { t } = useI18n()
return (
<Modal
show={this.$props.show}
title={t('project.workflow.version_info')}
onCancel={this.hideModal}
onConfirm={this.hideModal}
>
<NDataTable
columns={this.columns}
data={this.tableData}
striped
size={'small'}
class={styles.table}
/>
</Modal>
)
}
})
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,786 | [Bug] [ui] the "RESOURCE_NOT_EXIST" error message when clicking udF resource list file navigation prompt | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
Click on the folder in the UDF resource list and the navigation prompts "RESOURCE_NOT_EXIST"
### What you expected to happen
After the interface is upgraded, you need to input the ID when clicking the navigation.
### How to reproduce
In the resource management function, click the folder in the UDF resource list to navigate
### Anything else
Expect a fix in the next version
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8786 | https://github.com/apache/dolphinscheduler/pull/8788 | 97a0eede25d27c8b603099c7c1592c70e468c6c0 | 0bce1a96a14cf46e9d5852d9778da02cbc1a2560 | "2022-03-09T14:52:06Z" | java | "2022-03-10T03:31:37Z" | dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/subUdfDirectory/index.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="home-main list-construction-model">
<div class="content-title">
<a class="bread" style="padding-left: 15px;" @click="() => $router.push({path: `/resource/udf`})">{{$t('UDF Resources')}}</a>
<a class="bread" v-for="(item,$index) in breadList" :key="$index" @click="_ckOperation($index)">{{'>'+item}}</a>
</div>
<div class="conditions-box">
<m-conditions @on-conditions="_onConditions">
<template slot="button-group">
<el-button-group size="small" >
<el-button size="mini" @click="() => $router.push({name: 'resource-udf-subCreateUdfFolder'})">{{$t('Create folder')}}</el-button>
<el-button size="mini" @click="_uploading">{{$t('Upload UDF Resources')}}</el-button>
</el-button-group>
</template>
</m-conditions>
</div>
<div class="list-box">
<template v-if="udfResourcesList.length || total>0">
<m-list @on-update="_onUpdate" :udf-resources-list="udfResourcesList" :page-no="searchParams.pageNo" :page-size="searchParams.pageSize">
</m-list>
<div class="page-box">
<el-pagination
background
@current-change="_page"
@size-change="_pageSize"
:page-size="searchParams.pageSize"
:current-page.sync="searchParams.pageNo"
:page-sizes="[10, 30, 50]"
layout="sizes, prev, pager, next, jumper"
:total="total">
</el-pagination>
</div>
</template>
<template v-if="!udfResourcesList.length && total<=0">
<m-no-data></m-no-data>
</template>
<m-spin :is-spin="isLoading" :is-left="isLeft">
</m-spin>
</div>
</div>
</template>
<script>
import _ from 'lodash'
import { mapActions } from 'vuex'
import mList from './_source/list'
import localStore from '@/module/util/localStorage'
import mSpin from '@/module/components/spin/spin'
import { findComponentDownward } from '@/module/util/'
import mNoData from '@/module/components/noData/noData'
import listUrlParamHandle from '@/module/mixin/listUrlParamHandle'
import mConditions from '@/module/components/conditions/conditions'
export default {
name: 'resource-list-index-UDF',
data () {
return {
total: null,
isLoading: false,
udfResourcesList: [],
searchParams: {
id: this.$route.params.id,
pageSize: 10,
pageNo: 1,
searchVal: '',
type: 'UDF'
},
isLeft: true,
breadList: []
}
},
mixins: [listUrlParamHandle],
props: {},
methods: {
...mapActions('resource', ['getResourcesListP', 'getResourceId']),
/**
* File Upload
*/
_uploading () {
findComponentDownward(this.$root, 'roof-nav')._resourceChildUpdate('UDF', this.searchParams.id)
},
_onConditions (o) {
this.searchParams = _.assign(this.searchParams, o)
this.searchParams.pageNo = 1
},
_page (val) {
this.searchParams.pageNo = val
},
_pageSize (val) {
this.searchParams.pageSize = val
},
_onUpdate () {
this.searchParams.id = this.$route.params.id
this._debounceGET()
},
_updateList (data) {
this.searchParams.id = data
this.searchParams.pageNo = 1
this.searchParams.searchVal = ''
this._debounceGET()
},
_getList (flag) {
if (sessionStorage.getItem('isLeft') === 0) {
this.isLeft = false
} else {
this.isLeft = true
}
this.isLoading = !flag
this.searchParams.id = this.$route.params.id
this.getResourcesListP(this.searchParams).then(res => {
if (this.searchParams.pageNo > 1 && res.totalList.length === 0) {
this.searchParams.pageNo = this.searchParams.pageNo - 1
} else {
this.udfResourcesList = []
this.udfResourcesList = res.totalList
this.total = res.total
this.isLoading = false
}
}).catch(e => {
this.isLoading = false
})
},
_ckOperation (index) {
let breadName = ''
this.breadList.forEach((item, i) => {
if (i <= index) {
breadName = breadName + '/' + item
}
})
this.transferApi(breadName)
},
transferApi (api) {
this.getResourceId({
type: 'UDF',
fullName: api
}).then(res => {
localStore.setItem('currentDir', `${res.fullName}`)
this.$router.push({ path: `/resource/udf/subUdfDirectory/${res.id}` })
}).catch(e => {
this.$message.error(e.msg || '')
})
}
},
watch: {
// router
'$route' (a) {
// url no params get instance list
this.searchParams.pageNo = _.isEmpty(a.query) ? 1 : a.query.pageNo
this.searchParams.id = a.params.id
let dir = localStore.getItem('currentDir').split('/')
dir.shift()
this.breadList = dir
}
},
created () {
},
mounted () {
let dir = localStore.getItem('currentDir').split('/')
dir.shift()
this.breadList = dir
},
beforeDestroy () {
sessionStorage.setItem('isLeft', 1)
},
components: { mConditions, mList, mSpin, mNoData }
}
</script>
<style lang="scss" rel="stylesheet/scss">
.bread {
font-size: 22px;
padding-top: 10px;
color: #2a455b;
display: inline-block;
cursor: pointer;
}
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,746 | [Bug][UI] The copy node function will affect the original node | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
When using dataX tasks, the copy node function will affect the original node.
### What you expected to happen
Using the copy node function, the copied node should be independent and should not affect the original node.
### How to reproduce
1.Add a dataX task node
<img width="850" alt="image" src="https://user-images.githubusercontent.com/51101283/157145576-fb8b2f5a-56ac-4b9d-9758-6ee6991836a2.png">
2.Using the copy node feature
<img width="333" alt="image" src="https://user-images.githubusercontent.com/51101283/157145629-f567ab01-0eda-4719-a2e3-9645f596bbff.png">
<img width="285" alt="image" src="https://user-images.githubusercontent.com/51101283/157145676-920719d5-333c-4ae9-a677-b1baa484efa7.png">
3.Modify and save the copied node
<img width="809" alt="image" src="https://user-images.githubusercontent.com/51101283/157145736-b615e307-6df4-4119-abed-658ceeb47b3f.png">
4.View the original node again(The original node configuration is automatically modified)
<img width="713" alt="image" src="https://user-images.githubusercontent.com/51101283/157145815-72fef3bd-4495-4208-aeac-1ebef0e6ea7c.png">
### Anything else
_No response_
### Version
2.0.3
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8746 | https://github.com/apache/dolphinscheduler/pull/8791 | 0bce1a96a14cf46e9d5852d9778da02cbc1a2560 | 41b66b6adbf286671a9d9682d04b769ad9d21003 | "2022-03-08T01:12:35Z" | java | "2022-03-10T03:36:01Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/contextMenu.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div
class="dag-context-menu"
v-show="visible"
:style="{
left: `${left}px`,
top: `${top}px`,
}"
>
<menu-item :disabled="!startAvailable" @on-click="onStart">
{{ $t("Start") }}
</menu-item>
<menu-item :disabled="readOnly" @on-click="onEdit">
{{ $t("Edit") }}
</menu-item>
<menu-item :disabled="readOnly" @on-click="onCopy">
{{ $t("Copy") }}
</menu-item>
<menu-item :disabled="readOnly" @on-click="onDelete">
{{ $t("Delete") }}
</menu-item>
<menu-item v-if="dagChart.type === 'instance'" :disabled="!logMenuVisible" @on-click="showLog">
{{ $t('View log') }}
</menu-item>
</div>
</template>
<script>
import { mapState, mapActions, mapMutations } from 'vuex'
import { findComponentDownward, uuid } from '@/module/util/'
import MenuItem from './menuItem.vue'
export default {
name: 'dag-context-menu',
inject: ['dagChart', 'dagCanvas'],
components: {
MenuItem
},
data () {
return {
visible: false,
left: 0,
top: 0,
canvasRef: null,
currentTask: {
code: 0,
name: '',
type: ''
}
}
},
computed: {
...mapState('dag', ['isDetails', 'releaseState', 'tasks']),
startAvailable () {
return (
this.$route.name === 'projects-definition-details' &&
this.releaseState !== 'NOT_RELEASE'
)
},
readOnly () {
return this.isDetails
},
logMenuVisible () {
if (this.dagChart.taskInstances.length > 0) {
return !!this.dagChart.taskInstances.find(taskInstance => taskInstance.taskCode === this.currentTask.code)
}
return true
}
},
mounted () {
document.addEventListener('click', (e) => {
this.hide()
})
},
methods: {
...mapActions('dag', ['genTaskCodeList']),
...mapMutations('dag', ['addTask']),
getDagCanvasRef () {
if (this.canvasRef) {
return this.canvasRef
} else {
const canvas = findComponentDownward(this.dagChart, 'dag-canvas')
this.canvasRef = canvas
return canvas
}
},
setCurrentTask (task) {
this.currentTask = { ...this.currentTask, ...task }
},
onStart () {
this.dagChart.startRunning(this.currentTask.code)
},
onEdit () {
this.dagChart.openFormModel(this.currentTask.code, this.currentTask.type)
},
onCopy () {
const nodes = this.dagCanvas.getNodes()
const targetNode = nodes.find(
(node) => node.id === this.currentTask.code
)
const targetTask = this.tasks.find(
(task) => task.code === this.currentTask.code
)
if (!targetNode || !targetTask) return
this.genTaskCodeList({
genNum: 1
})
.then((res) => {
const [code] = res
const taskName = uuid(targetTask.name + '_')
const task = {
...targetTask,
code,
name: taskName
}
this.dagCanvas.addNode(code, this.currentTask.type, {
x: targetNode.position.x + 100,
y: targetNode.position.y + 100
})
this.addTask(task)
this.dagCanvas.setNodeName(code, taskName)
})
.catch((err) => {
console.error(err)
})
},
onDelete () {
this.dagCanvas.removeNode(this.currentTask.code)
},
showLog () {
this.dagChart.showLogDialog(this.currentTask.code)
},
show (x = 0, y = 0) {
this.dagCanvas.lockScroller()
this.visible = true
this.left = x + 10
this.top = y + 10
},
hide () {
this.dagCanvas.unlockScroller()
this.visible = false
}
}
}
</script>
<style lang="scss" scoped>
@import "./contextMenu";
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,793 | [Improvement] [UI Next][V1.0.0-Alpha] Prettier and ESlint | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
The files in the public folder are formatted when prettier or eslint is running.
### What you expected to happen
The files in the public folder are not formatted
### How to reproduce
`pnpm run prettier . `
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8793 | https://github.com/apache/dolphinscheduler/pull/8794 | 41b66b6adbf286671a9d9682d04b769ad9d21003 | cb5910c95b8c2c069183361feb8f8181acdb4874 | "2022-03-10T03:08:05Z" | java | "2022-03-10T03:59:23Z" | dolphinscheduler-ui-next/.eslintignore | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
node_modules
dist
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,793 | [Improvement] [UI Next][V1.0.0-Alpha] Prettier and ESlint | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
The files in the public folder are formatted when prettier or eslint is running.
### What you expected to happen
The files in the public folder are not formatted
### How to reproduce
`pnpm run prettier . `
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8793 | https://github.com/apache/dolphinscheduler/pull/8794 | 41b66b6adbf286671a9d9682d04b769ad9d21003 | cb5910c95b8c2c069183361feb8f8181acdb4874 | "2022-03-10T03:08:05Z" | java | "2022-03-10T03:59:23Z" | dolphinscheduler-ui-next/.prettierignore | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
node_modules
dist
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,793 | [Improvement] [UI Next][V1.0.0-Alpha] Prettier and ESlint | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
The files in the public folder are formatted when prettier or eslint is running.
### What you expected to happen
The files in the public folder are not formatted
### How to reproduce
`pnpm run prettier . `
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8793 | https://github.com/apache/dolphinscheduler/pull/8794 | 41b66b6adbf286671a9d9682d04b769ad9d21003 | cb5910c95b8c2c069183361feb8f8181acdb4874 | "2022-03-10T03:08:05Z" | java | "2022-03-10T03:59:23Z" | dolphinscheduler-ui-next/public/lodash.min.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function(){function n(n,t,r){switch(r.length){case 0:return n.call(t);case 1:return n.call(t,r[0]);case 2:return n.call(t,r[0],r[1]);case 3:return n.call(t,r[0],r[1],r[2])}return n.apply(t,r)}function t(n,t,r,e){for(var u=-1,i=null==n?0:n.length;++u<i;){var o=n[u];t(e,o,r(o),n)}return e}function r(n,t){for(var r=-1,e=null==n?0:n.length;++r<e&&t(n[r],r,n)!==!1;);return n}function e(n,t){for(var r=null==n?0:n.length;r--&&t(n[r],r,n)!==!1;);return n}function u(n,t){for(var r=-1,e=null==n?0:n.length;++r<e;)if(!t(n[r],r,n))return!1;
return!0}function i(n,t){for(var r=-1,e=null==n?0:n.length,u=0,i=[];++r<e;){var o=n[r];t(o,r,n)&&(i[u++]=o)}return i}function o(n,t){return!!(null==n?0:n.length)&&y(n,t,0)>-1}function f(n,t,r){for(var e=-1,u=null==n?0:n.length;++e<u;)if(r(t,n[e]))return!0;return!1}function c(n,t){for(var r=-1,e=null==n?0:n.length,u=Array(e);++r<e;)u[r]=t(n[r],r,n);return u}function a(n,t){for(var r=-1,e=t.length,u=n.length;++r<e;)n[u+r]=t[r];return n}function l(n,t,r,e){var u=-1,i=null==n?0:n.length;for(e&&i&&(r=n[++u]);++u<i;)r=t(r,n[u],u,n);
return r}function s(n,t,r,e){var u=null==n?0:n.length;for(e&&u&&(r=n[--u]);u--;)r=t(r,n[u],u,n);return r}function h(n,t){for(var r=-1,e=null==n?0:n.length;++r<e;)if(t(n[r],r,n))return!0;return!1}function p(n){return n.split("")}function _(n){return n.match($t)||[]}function v(n,t,r){var e;return r(n,function(n,r,u){if(t(n,r,u))return e=r,!1}),e}function g(n,t,r,e){for(var u=n.length,i=r+(e?1:-1);e?i--:++i<u;)if(t(n[i],i,n))return i;return-1}function y(n,t,r){return t===t?Z(n,t,r):g(n,b,r)}function d(n,t,r,e){
for(var u=r-1,i=n.length;++u<i;)if(e(n[u],t))return u;return-1}function b(n){return n!==n}function w(n,t){var r=null==n?0:n.length;return r?k(n,t)/r:Cn}function m(n){return function(t){return null==t?X:t[n]}}function x(n){return function(t){return null==n?X:n[t]}}function j(n,t,r,e,u){return u(n,function(n,u,i){r=e?(e=!1,n):t(r,n,u,i)}),r}function A(n,t){var r=n.length;for(n.sort(t);r--;)n[r]=n[r].value;return n}function k(n,t){for(var r,e=-1,u=n.length;++e<u;){var i=t(n[e]);i!==X&&(r=r===X?i:r+i);
}return r}function O(n,t){for(var r=-1,e=Array(n);++r<n;)e[r]=t(r);return e}function I(n,t){return c(t,function(t){return[t,n[t]]})}function R(n){return n?n.slice(0,H(n)+1).replace(Lt,""):n}function z(n){return function(t){return n(t)}}function E(n,t){return c(t,function(t){return n[t]})}function S(n,t){return n.has(t)}function W(n,t){for(var r=-1,e=n.length;++r<e&&y(t,n[r],0)>-1;);return r}function L(n,t){for(var r=n.length;r--&&y(t,n[r],0)>-1;);return r}function C(n,t){for(var r=n.length,e=0;r--;)n[r]===t&&++e;
return e}function U(n){return"\\"+Yr[n]}function B(n,t){return null==n?X:n[t]}function T(n){return Nr.test(n)}function $(n){return Pr.test(n)}function D(n){for(var t,r=[];!(t=n.next()).done;)r.push(t.value);return r}function M(n){var t=-1,r=Array(n.size);return n.forEach(function(n,e){r[++t]=[e,n]}),r}function F(n,t){return function(r){return n(t(r))}}function N(n,t){for(var r=-1,e=n.length,u=0,i=[];++r<e;){var o=n[r];o!==t&&o!==cn||(n[r]=cn,i[u++]=r)}return i}function P(n){var t=-1,r=Array(n.size);
return n.forEach(function(n){r[++t]=n}),r}function q(n){var t=-1,r=Array(n.size);return n.forEach(function(n){r[++t]=[n,n]}),r}function Z(n,t,r){for(var e=r-1,u=n.length;++e<u;)if(n[e]===t)return e;return-1}function K(n,t,r){for(var e=r+1;e--;)if(n[e]===t)return e;return e}function V(n){return T(n)?J(n):_e(n)}function G(n){return T(n)?Y(n):p(n)}function H(n){for(var t=n.length;t--&&Ct.test(n.charAt(t)););return t}function J(n){for(var t=Mr.lastIndex=0;Mr.test(n);)++t;return t}function Y(n){return n.match(Mr)||[];
}function Q(n){return n.match(Fr)||[]}var X,nn="4.17.21",tn=200,rn="Unsupported core-js use. Try https://npms.io/search?q=ponyfill.",en="Expected a function",un="Invalid `variable` option passed into `_.template`",on="__lodash_hash_undefined__",fn=500,cn="__lodash_placeholder__",an=1,ln=2,sn=4,hn=1,pn=2,_n=1,vn=2,gn=4,yn=8,dn=16,bn=32,wn=64,mn=128,xn=256,jn=512,An=30,kn="...",On=800,In=16,Rn=1,zn=2,En=3,Sn=1/0,Wn=9007199254740991,Ln=1.7976931348623157e308,Cn=NaN,Un=4294967295,Bn=Un-1,Tn=Un>>>1,$n=[["ary",mn],["bind",_n],["bindKey",vn],["curry",yn],["curryRight",dn],["flip",jn],["partial",bn],["partialRight",wn],["rearg",xn]],Dn="[object Arguments]",Mn="[object Array]",Fn="[object AsyncFunction]",Nn="[object Boolean]",Pn="[object Date]",qn="[object DOMException]",Zn="[object Error]",Kn="[object Function]",Vn="[object GeneratorFunction]",Gn="[object Map]",Hn="[object Number]",Jn="[object Null]",Yn="[object Object]",Qn="[object Promise]",Xn="[object Proxy]",nt="[object RegExp]",tt="[object Set]",rt="[object String]",et="[object Symbol]",ut="[object Undefined]",it="[object WeakMap]",ot="[object WeakSet]",ft="[object ArrayBuffer]",ct="[object DataView]",at="[object Float32Array]",lt="[object Float64Array]",st="[object Int8Array]",ht="[object Int16Array]",pt="[object Int32Array]",_t="[object Uint8Array]",vt="[object Uint8ClampedArray]",gt="[object Uint16Array]",yt="[object Uint32Array]",dt=/\b__p \+= '';/g,bt=/\b(__p \+=) '' \+/g,wt=/(__e\(.*?\)|\b__t\)) \+\n'';/g,mt=/&(?:amp|lt|gt|quot|#39);/g,xt=/[&<>"']/g,jt=RegExp(mt.source),At=RegExp(xt.source),kt=/<%-([\s\S]+?)%>/g,Ot=/<%([\s\S]+?)%>/g,It=/<%=([\s\S]+?)%>/g,Rt=/\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/,zt=/^\w*$/,Et=/[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g,St=/[\\^$.*+?()[\]{}|]/g,Wt=RegExp(St.source),Lt=/^\s+/,Ct=/\s/,Ut=/\{(?:\n\/\* \[wrapped with .+\] \*\/)?\n?/,Bt=/\{\n\/\* \[wrapped with (.+)\] \*/,Tt=/,? 
& /,$t=/[^\x00-\x2f\x3a-\x40\x5b-\x60\x7b-\x7f]+/g,Dt=/[()=,{}\[\]\/\s]/,Mt=/\\(\\)?/g,Ft=/\$\{([^\\}]*(?:\\.[^\\}]*)*)\}/g,Nt=/\w*$/,Pt=/^[-+]0x[0-9a-f]+$/i,qt=/^0b[01]+$/i,Zt=/^\[object .+?Constructor\]$/,Kt=/^0o[0-7]+$/i,Vt=/^(?:0|[1-9]\d*)$/,Gt=/[\xc0-\xd6\xd8-\xf6\xf8-\xff\u0100-\u017f]/g,Ht=/($^)/,Jt=/['\n\r\u2028\u2029\\]/g,Yt="\\ud800-\\udfff",Qt="\\u0300-\\u036f",Xt="\\ufe20-\\ufe2f",nr="\\u20d0-\\u20ff",tr=Qt+Xt+nr,rr="\\u2700-\\u27bf",er="a-z\\xdf-\\xf6\\xf8-\\xff",ur="\\xac\\xb1\\xd7\\xf7",ir="\\x00-\\x2f\\x3a-\\x40\\x5b-\\x60\\x7b-\\xbf",or="\\u2000-\\u206f",fr=" \\t\\x0b\\f\\xa0\\ufeff\\n\\r\\u2028\\u2029\\u1680\\u180e\\u2000\\u2001\\u2002\\u2003\\u2004\\u2005\\u2006\\u2007\\u2008\\u2009\\u200a\\u202f\\u205f\\u3000",cr="A-Z\\xc0-\\xd6\\xd8-\\xde",ar="\\ufe0e\\ufe0f",lr=ur+ir+or+fr,sr="['\u2019]",hr="["+Yt+"]",pr="["+lr+"]",_r="["+tr+"]",vr="\\d+",gr="["+rr+"]",yr="["+er+"]",dr="[^"+Yt+lr+vr+rr+er+cr+"]",br="\\ud83c[\\udffb-\\udfff]",wr="(?:"+_r+"|"+br+")",mr="[^"+Yt+"]",xr="(?:\\ud83c[\\udde6-\\uddff]){2}",jr="[\\ud800-\\udbff][\\udc00-\\udfff]",Ar="["+cr+"]",kr="\\u200d",Or="(?:"+yr+"|"+dr+")",Ir="(?:"+Ar+"|"+dr+")",Rr="(?:"+sr+"(?:d|ll|m|re|s|t|ve))?",zr="(?:"+sr+"(?:D|LL|M|RE|S|T|VE))?",Er=wr+"?",Sr="["+ar+"]?",Wr="(?:"+kr+"(?:"+[mr,xr,jr].join("|")+")"+Sr+Er+")*",Lr="\\d*(?:1st|2nd|3rd|(?![123])\\dth)(?=\\b|[A-Z_])",Cr="\\d*(?:1ST|2ND|3RD|(?![123])\\dTH)(?=\\b|[a-z_])",Ur=Sr+Er+Wr,Br="(?:"+[gr,xr,jr].join("|")+")"+Ur,Tr="(?:"+[mr+_r+"?",_r,xr,jr,hr].join("|")+")",$r=RegExp(sr,"g"),Dr=RegExp(_r,"g"),Mr=RegExp(br+"(?="+br+")|"+Tr+Ur,"g"),Fr=RegExp([Ar+"?"+yr+"+"+Rr+"(?="+[pr,Ar,"$"].join("|")+")",Ir+"+"+zr+"(?="+[pr,Ar+Or,"$"].join("|")+")",Ar+"?"+Or+"+"+Rr,Ar+"+"+zr,Cr,Lr,vr,Br].join("|"),"g"),Nr=RegExp("["+kr+Yt+tr+ar+"]"),Pr=/[a-z][A-Z]|[A-Z]{2}[a-z]|[0-9][a-zA-Z]|[a-zA-Z][0-9]|[^a-zA-Z0-9 
]/,qr=["Array","Buffer","DataView","Date","Error","Float32Array","Float64Array","Function","Int8Array","Int16Array","Int32Array","Map","Math","Object","Promise","RegExp","Set","String","Symbol","TypeError","Uint8Array","Uint8ClampedArray","Uint16Array","Uint32Array","WeakMap","_","clearTimeout","isFinite","parseInt","setTimeout"],Zr=-1,Kr={};
Kr[at]=Kr[lt]=Kr[st]=Kr[ht]=Kr[pt]=Kr[_t]=Kr[vt]=Kr[gt]=Kr[yt]=!0,Kr[Dn]=Kr[Mn]=Kr[ft]=Kr[Nn]=Kr[ct]=Kr[Pn]=Kr[Zn]=Kr[Kn]=Kr[Gn]=Kr[Hn]=Kr[Yn]=Kr[nt]=Kr[tt]=Kr[rt]=Kr[it]=!1;var Vr={};Vr[Dn]=Vr[Mn]=Vr[ft]=Vr[ct]=Vr[Nn]=Vr[Pn]=Vr[at]=Vr[lt]=Vr[st]=Vr[ht]=Vr[pt]=Vr[Gn]=Vr[Hn]=Vr[Yn]=Vr[nt]=Vr[tt]=Vr[rt]=Vr[et]=Vr[_t]=Vr[vt]=Vr[gt]=Vr[yt]=!0,Vr[Zn]=Vr[Kn]=Vr[it]=!1;var Gr={"\xc0":"A","\xc1":"A","\xc2":"A","\xc3":"A","\xc4":"A","\xc5":"A","\xe0":"a","\xe1":"a","\xe2":"a","\xe3":"a","\xe4":"a","\xe5":"a",
"\xc7":"C","\xe7":"c","\xd0":"D","\xf0":"d","\xc8":"E","\xc9":"E","\xca":"E","\xcb":"E","\xe8":"e","\xe9":"e","\xea":"e","\xeb":"e","\xcc":"I","\xcd":"I","\xce":"I","\xcf":"I","\xec":"i","\xed":"i","\xee":"i","\xef":"i","\xd1":"N","\xf1":"n","\xd2":"O","\xd3":"O","\xd4":"O","\xd5":"O","\xd6":"O","\xd8":"O","\xf2":"o","\xf3":"o","\xf4":"o","\xf5":"o","\xf6":"o","\xf8":"o","\xd9":"U","\xda":"U","\xdb":"U","\xdc":"U","\xf9":"u","\xfa":"u","\xfb":"u","\xfc":"u","\xdd":"Y","\xfd":"y","\xff":"y","\xc6":"Ae",
"\xe6":"ae","\xde":"Th","\xfe":"th","\xdf":"ss","\u0100":"A","\u0102":"A","\u0104":"A","\u0101":"a","\u0103":"a","\u0105":"a","\u0106":"C","\u0108":"C","\u010a":"C","\u010c":"C","\u0107":"c","\u0109":"c","\u010b":"c","\u010d":"c","\u010e":"D","\u0110":"D","\u010f":"d","\u0111":"d","\u0112":"E","\u0114":"E","\u0116":"E","\u0118":"E","\u011a":"E","\u0113":"e","\u0115":"e","\u0117":"e","\u0119":"e","\u011b":"e","\u011c":"G","\u011e":"G","\u0120":"G","\u0122":"G","\u011d":"g","\u011f":"g","\u0121":"g",
"\u0123":"g","\u0124":"H","\u0126":"H","\u0125":"h","\u0127":"h","\u0128":"I","\u012a":"I","\u012c":"I","\u012e":"I","\u0130":"I","\u0129":"i","\u012b":"i","\u012d":"i","\u012f":"i","\u0131":"i","\u0134":"J","\u0135":"j","\u0136":"K","\u0137":"k","\u0138":"k","\u0139":"L","\u013b":"L","\u013d":"L","\u013f":"L","\u0141":"L","\u013a":"l","\u013c":"l","\u013e":"l","\u0140":"l","\u0142":"l","\u0143":"N","\u0145":"N","\u0147":"N","\u014a":"N","\u0144":"n","\u0146":"n","\u0148":"n","\u014b":"n","\u014c":"O",
"\u014e":"O","\u0150":"O","\u014d":"o","\u014f":"o","\u0151":"o","\u0154":"R","\u0156":"R","\u0158":"R","\u0155":"r","\u0157":"r","\u0159":"r","\u015a":"S","\u015c":"S","\u015e":"S","\u0160":"S","\u015b":"s","\u015d":"s","\u015f":"s","\u0161":"s","\u0162":"T","\u0164":"T","\u0166":"T","\u0163":"t","\u0165":"t","\u0167":"t","\u0168":"U","\u016a":"U","\u016c":"U","\u016e":"U","\u0170":"U","\u0172":"U","\u0169":"u","\u016b":"u","\u016d":"u","\u016f":"u","\u0171":"u","\u0173":"u","\u0174":"W","\u0175":"w",
"\u0176":"Y","\u0177":"y","\u0178":"Y","\u0179":"Z","\u017b":"Z","\u017d":"Z","\u017a":"z","\u017c":"z","\u017e":"z","\u0132":"IJ","\u0133":"ij","\u0152":"Oe","\u0153":"oe","\u0149":"'n","\u017f":"s"},Hr={"&":"&","<":"<",">":">",'"':""","'":"'"},Jr={"&":"&","<":"<",">":">",""":'"',"'":"'"},Yr={"\\":"\\","'":"'","\n":"n","\r":"r","\u2028":"u2028","\u2029":"u2029"},Qr=parseFloat,Xr=parseInt,ne="object"==typeof global&&global&&global.Object===Object&&global,te="object"==typeof self&&self&&self.Object===Object&&self,re=ne||te||Function("return this")(),ee="object"==typeof exports&&exports&&!exports.nodeType&&exports,ue=ee&&"object"==typeof module&&module&&!module.nodeType&&module,ie=ue&&ue.exports===ee,oe=ie&&ne.process,fe=function(){
try{var n=ue&&ue.require&&ue.require("util").types;return n?n:oe&&oe.binding&&oe.binding("util")}catch(n){}}(),ce=fe&&fe.isArrayBuffer,ae=fe&&fe.isDate,le=fe&&fe.isMap,se=fe&&fe.isRegExp,he=fe&&fe.isSet,pe=fe&&fe.isTypedArray,_e=m("length"),ve=x(Gr),ge=x(Hr),ye=x(Jr),de=function p(x){function Z(n){if(cc(n)&&!bh(n)&&!(n instanceof Ct)){if(n instanceof Y)return n;if(bl.call(n,"__wrapped__"))return eo(n)}return new Y(n)}function J(){}function Y(n,t){this.__wrapped__=n,this.__actions__=[],this.__chain__=!!t,
this.__index__=0,this.__values__=X}function Ct(n){this.__wrapped__=n,this.__actions__=[],this.__dir__=1,this.__filtered__=!1,this.__iteratees__=[],this.__takeCount__=Un,this.__views__=[]}function $t(){var n=new Ct(this.__wrapped__);return n.__actions__=Tu(this.__actions__),n.__dir__=this.__dir__,n.__filtered__=this.__filtered__,n.__iteratees__=Tu(this.__iteratees__),n.__takeCount__=this.__takeCount__,n.__views__=Tu(this.__views__),n}function Yt(){if(this.__filtered__){var n=new Ct(this);n.__dir__=-1,
n.__filtered__=!0}else n=this.clone(),n.__dir__*=-1;return n}function Qt(){var n=this.__wrapped__.value(),t=this.__dir__,r=bh(n),e=t<0,u=r?n.length:0,i=Oi(0,u,this.__views__),o=i.start,f=i.end,c=f-o,a=e?f:o-1,l=this.__iteratees__,s=l.length,h=0,p=Hl(c,this.__takeCount__);if(!r||!e&&u==c&&p==c)return wu(n,this.__actions__);var _=[];n:for(;c--&&h<p;){a+=t;for(var v=-1,g=n[a];++v<s;){var y=l[v],d=y.iteratee,b=y.type,w=d(g);if(b==zn)g=w;else if(!w){if(b==Rn)continue n;break n}}_[h++]=g}return _}function Xt(n){
var t=-1,r=null==n?0:n.length;for(this.clear();++t<r;){var e=n[t];this.set(e[0],e[1])}}function nr(){this.__data__=is?is(null):{},this.size=0}function tr(n){var t=this.has(n)&&delete this.__data__[n];return this.size-=t?1:0,t}function rr(n){var t=this.__data__;if(is){var r=t[n];return r===on?X:r}return bl.call(t,n)?t[n]:X}function er(n){var t=this.__data__;return is?t[n]!==X:bl.call(t,n)}function ur(n,t){var r=this.__data__;return this.size+=this.has(n)?0:1,r[n]=is&&t===X?on:t,this}function ir(n){
var t=-1,r=null==n?0:n.length;for(this.clear();++t<r;){var e=n[t];this.set(e[0],e[1])}}function or(){this.__data__=[],this.size=0}function fr(n){var t=this.__data__,r=Wr(t,n);return!(r<0)&&(r==t.length-1?t.pop():Ll.call(t,r,1),--this.size,!0)}function cr(n){var t=this.__data__,r=Wr(t,n);return r<0?X:t[r][1]}function ar(n){return Wr(this.__data__,n)>-1}function lr(n,t){var r=this.__data__,e=Wr(r,n);return e<0?(++this.size,r.push([n,t])):r[e][1]=t,this}function sr(n){var t=-1,r=null==n?0:n.length;for(this.clear();++t<r;){
var e=n[t];this.set(e[0],e[1])}}function hr(){this.size=0,this.__data__={hash:new Xt,map:new(ts||ir),string:new Xt}}function pr(n){var t=xi(this,n).delete(n);return this.size-=t?1:0,t}function _r(n){return xi(this,n).get(n)}function vr(n){return xi(this,n).has(n)}function gr(n,t){var r=xi(this,n),e=r.size;return r.set(n,t),this.size+=r.size==e?0:1,this}function yr(n){var t=-1,r=null==n?0:n.length;for(this.__data__=new sr;++t<r;)this.add(n[t])}function dr(n){return this.__data__.set(n,on),this}function br(n){
return this.__data__.has(n)}function wr(n){this.size=(this.__data__=new ir(n)).size}function mr(){this.__data__=new ir,this.size=0}function xr(n){var t=this.__data__,r=t.delete(n);return this.size=t.size,r}function jr(n){return this.__data__.get(n)}function Ar(n){return this.__data__.has(n)}function kr(n,t){var r=this.__data__;if(r instanceof ir){var e=r.__data__;if(!ts||e.length<tn-1)return e.push([n,t]),this.size=++r.size,this;r=this.__data__=new sr(e)}return r.set(n,t),this.size=r.size,this}function Or(n,t){
var r=bh(n),e=!r&&dh(n),u=!r&&!e&&mh(n),i=!r&&!e&&!u&&Oh(n),o=r||e||u||i,f=o?O(n.length,hl):[],c=f.length;for(var a in n)!t&&!bl.call(n,a)||o&&("length"==a||u&&("offset"==a||"parent"==a)||i&&("buffer"==a||"byteLength"==a||"byteOffset"==a)||Ci(a,c))||f.push(a);return f}function Ir(n){var t=n.length;return t?n[tu(0,t-1)]:X}function Rr(n,t){return Xi(Tu(n),Mr(t,0,n.length))}function zr(n){return Xi(Tu(n))}function Er(n,t,r){(r===X||Gf(n[t],r))&&(r!==X||t in n)||Br(n,t,r)}function Sr(n,t,r){var e=n[t];
bl.call(n,t)&&Gf(e,r)&&(r!==X||t in n)||Br(n,t,r)}function Wr(n,t){for(var r=n.length;r--;)if(Gf(n[r][0],t))return r;return-1}function Lr(n,t,r,e){return ys(n,function(n,u,i){t(e,n,r(n),i)}),e}function Cr(n,t){return n&&$u(t,Pc(t),n)}function Ur(n,t){return n&&$u(t,qc(t),n)}function Br(n,t,r){"__proto__"==t&&Tl?Tl(n,t,{configurable:!0,enumerable:!0,value:r,writable:!0}):n[t]=r}function Tr(n,t){for(var r=-1,e=t.length,u=il(e),i=null==n;++r<e;)u[r]=i?X:Mc(n,t[r]);return u}function Mr(n,t,r){return n===n&&(r!==X&&(n=n<=r?n:r),
t!==X&&(n=n>=t?n:t)),n}function Fr(n,t,e,u,i,o){var f,c=t&an,a=t&ln,l=t&sn;if(e&&(f=i?e(n,u,i,o):e(n)),f!==X)return f;if(!fc(n))return n;var s=bh(n);if(s){if(f=zi(n),!c)return Tu(n,f)}else{var h=zs(n),p=h==Kn||h==Vn;if(mh(n))return Iu(n,c);if(h==Yn||h==Dn||p&&!i){if(f=a||p?{}:Ei(n),!c)return a?Mu(n,Ur(f,n)):Du(n,Cr(f,n))}else{if(!Vr[h])return i?n:{};f=Si(n,h,c)}}o||(o=new wr);var _=o.get(n);if(_)return _;o.set(n,f),kh(n)?n.forEach(function(r){f.add(Fr(r,t,e,r,n,o))}):jh(n)&&n.forEach(function(r,u){
f.set(u,Fr(r,t,e,u,n,o))});var v=l?a?di:yi:a?qc:Pc,g=s?X:v(n);return r(g||n,function(r,u){g&&(u=r,r=n[u]),Sr(f,u,Fr(r,t,e,u,n,o))}),f}function Nr(n){var t=Pc(n);return function(r){return Pr(r,n,t)}}function Pr(n,t,r){var e=r.length;if(null==n)return!e;for(n=ll(n);e--;){var u=r[e],i=t[u],o=n[u];if(o===X&&!(u in n)||!i(o))return!1}return!0}function Gr(n,t,r){if("function"!=typeof n)throw new pl(en);return Ws(function(){n.apply(X,r)},t)}function Hr(n,t,r,e){var u=-1,i=o,a=!0,l=n.length,s=[],h=t.length;
if(!l)return s;r&&(t=c(t,z(r))),e?(i=f,a=!1):t.length>=tn&&(i=S,a=!1,t=new yr(t));n:for(;++u<l;){var p=n[u],_=null==r?p:r(p);if(p=e||0!==p?p:0,a&&_===_){for(var v=h;v--;)if(t[v]===_)continue n;s.push(p)}else i(t,_,e)||s.push(p)}return s}function Jr(n,t){var r=!0;return ys(n,function(n,e,u){return r=!!t(n,e,u)}),r}function Yr(n,t,r){for(var e=-1,u=n.length;++e<u;){var i=n[e],o=t(i);if(null!=o&&(f===X?o===o&&!bc(o):r(o,f)))var f=o,c=i}return c}function ne(n,t,r,e){var u=n.length;for(r=kc(r),r<0&&(r=-r>u?0:u+r),
e=e===X||e>u?u:kc(e),e<0&&(e+=u),e=r>e?0:Oc(e);r<e;)n[r++]=t;return n}function te(n,t){var r=[];return ys(n,function(n,e,u){t(n,e,u)&&r.push(n)}),r}function ee(n,t,r,e,u){var i=-1,o=n.length;for(r||(r=Li),u||(u=[]);++i<o;){var f=n[i];t>0&&r(f)?t>1?ee(f,t-1,r,e,u):a(u,f):e||(u[u.length]=f)}return u}function ue(n,t){return n&&bs(n,t,Pc)}function oe(n,t){return n&&ws(n,t,Pc)}function fe(n,t){return i(t,function(t){return uc(n[t])})}function _e(n,t){t=ku(t,n);for(var r=0,e=t.length;null!=n&&r<e;)n=n[no(t[r++])];
return r&&r==e?n:X}function de(n,t,r){var e=t(n);return bh(n)?e:a(e,r(n))}function we(n){return null==n?n===X?ut:Jn:Bl&&Bl in ll(n)?ki(n):Ki(n)}function me(n,t){return n>t}function xe(n,t){return null!=n&&bl.call(n,t)}function je(n,t){return null!=n&&t in ll(n)}function Ae(n,t,r){return n>=Hl(t,r)&&n<Gl(t,r)}function ke(n,t,r){for(var e=r?f:o,u=n[0].length,i=n.length,a=i,l=il(i),s=1/0,h=[];a--;){var p=n[a];a&&t&&(p=c(p,z(t))),s=Hl(p.length,s),l[a]=!r&&(t||u>=120&&p.length>=120)?new yr(a&&p):X}p=n[0];
var _=-1,v=l[0];n:for(;++_<u&&h.length<s;){var g=p[_],y=t?t(g):g;if(g=r||0!==g?g:0,!(v?S(v,y):e(h,y,r))){for(a=i;--a;){var d=l[a];if(!(d?S(d,y):e(n[a],y,r)))continue n}v&&v.push(y),h.push(g)}}return h}function Oe(n,t,r,e){return ue(n,function(n,u,i){t(e,r(n),u,i)}),e}function Ie(t,r,e){r=ku(r,t),t=Gi(t,r);var u=null==t?t:t[no(jo(r))];return null==u?X:n(u,t,e)}function Re(n){return cc(n)&&we(n)==Dn}function ze(n){return cc(n)&&we(n)==ft}function Ee(n){return cc(n)&&we(n)==Pn}function Se(n,t,r,e,u){
return n===t||(null==n||null==t||!cc(n)&&!cc(t)?n!==n&&t!==t:We(n,t,r,e,Se,u))}function We(n,t,r,e,u,i){var o=bh(n),f=bh(t),c=o?Mn:zs(n),a=f?Mn:zs(t);c=c==Dn?Yn:c,a=a==Dn?Yn:a;var l=c==Yn,s=a==Yn,h=c==a;if(h&&mh(n)){if(!mh(t))return!1;o=!0,l=!1}if(h&&!l)return i||(i=new wr),o||Oh(n)?pi(n,t,r,e,u,i):_i(n,t,c,r,e,u,i);if(!(r&hn)){var p=l&&bl.call(n,"__wrapped__"),_=s&&bl.call(t,"__wrapped__");if(p||_){var v=p?n.value():n,g=_?t.value():t;return i||(i=new wr),u(v,g,r,e,i)}}return!!h&&(i||(i=new wr),vi(n,t,r,e,u,i));
}function Le(n){return cc(n)&&zs(n)==Gn}function Ce(n,t,r,e){var u=r.length,i=u,o=!e;if(null==n)return!i;for(n=ll(n);u--;){var f=r[u];if(o&&f[2]?f[1]!==n[f[0]]:!(f[0]in n))return!1}for(;++u<i;){f=r[u];var c=f[0],a=n[c],l=f[1];if(o&&f[2]){if(a===X&&!(c in n))return!1}else{var s=new wr;if(e)var h=e(a,l,c,n,t,s);if(!(h===X?Se(l,a,hn|pn,e,s):h))return!1}}return!0}function Ue(n){return!(!fc(n)||Di(n))&&(uc(n)?kl:Zt).test(to(n))}function Be(n){return cc(n)&&we(n)==nt}function Te(n){return cc(n)&&zs(n)==tt;
}function $e(n){return cc(n)&&oc(n.length)&&!!Kr[we(n)]}function De(n){return"function"==typeof n?n:null==n?La:"object"==typeof n?bh(n)?Ze(n[0],n[1]):qe(n):Fa(n)}function Me(n){if(!Mi(n))return Vl(n);var t=[];for(var r in ll(n))bl.call(n,r)&&"constructor"!=r&&t.push(r);return t}function Fe(n){if(!fc(n))return Zi(n);var t=Mi(n),r=[];for(var e in n)("constructor"!=e||!t&&bl.call(n,e))&&r.push(e);return r}function Ne(n,t){return n<t}function Pe(n,t){var r=-1,e=Hf(n)?il(n.length):[];return ys(n,function(n,u,i){
e[++r]=t(n,u,i)}),e}function qe(n){var t=ji(n);return 1==t.length&&t[0][2]?Ni(t[0][0],t[0][1]):function(r){return r===n||Ce(r,n,t)}}function Ze(n,t){return Bi(n)&&Fi(t)?Ni(no(n),t):function(r){var e=Mc(r,n);return e===X&&e===t?Nc(r,n):Se(t,e,hn|pn)}}function Ke(n,t,r,e,u){n!==t&&bs(t,function(i,o){if(u||(u=new wr),fc(i))Ve(n,t,o,r,Ke,e,u);else{var f=e?e(Ji(n,o),i,o+"",n,t,u):X;f===X&&(f=i),Er(n,o,f)}},qc)}function Ve(n,t,r,e,u,i,o){var f=Ji(n,r),c=Ji(t,r),a=o.get(c);if(a)return Er(n,r,a),X;var l=i?i(f,c,r+"",n,t,o):X,s=l===X;
if(s){var h=bh(c),p=!h&&mh(c),_=!h&&!p&&Oh(c);l=c,h||p||_?bh(f)?l=f:Jf(f)?l=Tu(f):p?(s=!1,l=Iu(c,!0)):_?(s=!1,l=Wu(c,!0)):l=[]:gc(c)||dh(c)?(l=f,dh(f)?l=Rc(f):fc(f)&&!uc(f)||(l=Ei(c))):s=!1}s&&(o.set(c,l),u(l,c,e,i,o),o.delete(c)),Er(n,r,l)}function Ge(n,t){var r=n.length;if(r)return t+=t<0?r:0,Ci(t,r)?n[t]:X}function He(n,t,r){t=t.length?c(t,function(n){return bh(n)?function(t){return _e(t,1===n.length?n[0]:n)}:n}):[La];var e=-1;return t=c(t,z(mi())),A(Pe(n,function(n,r,u){return{criteria:c(t,function(t){
return t(n)}),index:++e,value:n}}),function(n,t){return Cu(n,t,r)})}function Je(n,t){return Ye(n,t,function(t,r){return Nc(n,r)})}function Ye(n,t,r){for(var e=-1,u=t.length,i={};++e<u;){var o=t[e],f=_e(n,o);r(f,o)&&fu(i,ku(o,n),f)}return i}function Qe(n){return function(t){return _e(t,n)}}function Xe(n,t,r,e){var u=e?d:y,i=-1,o=t.length,f=n;for(n===t&&(t=Tu(t)),r&&(f=c(n,z(r)));++i<o;)for(var a=0,l=t[i],s=r?r(l):l;(a=u(f,s,a,e))>-1;)f!==n&&Ll.call(f,a,1),Ll.call(n,a,1);return n}function nu(n,t){for(var r=n?t.length:0,e=r-1;r--;){
var u=t[r];if(r==e||u!==i){var i=u;Ci(u)?Ll.call(n,u,1):yu(n,u)}}return n}function tu(n,t){return n+Nl(Ql()*(t-n+1))}function ru(n,t,r,e){for(var u=-1,i=Gl(Fl((t-n)/(r||1)),0),o=il(i);i--;)o[e?i:++u]=n,n+=r;return o}function eu(n,t){var r="";if(!n||t<1||t>Wn)return r;do t%2&&(r+=n),t=Nl(t/2),t&&(n+=n);while(t);return r}function uu(n,t){return Ls(Vi(n,t,La),n+"")}function iu(n){return Ir(ra(n))}function ou(n,t){var r=ra(n);return Xi(r,Mr(t,0,r.length))}function fu(n,t,r,e){if(!fc(n))return n;t=ku(t,n);
for(var u=-1,i=t.length,o=i-1,f=n;null!=f&&++u<i;){var c=no(t[u]),a=r;if("__proto__"===c||"constructor"===c||"prototype"===c)return n;if(u!=o){var l=f[c];a=e?e(l,c,f):X,a===X&&(a=fc(l)?l:Ci(t[u+1])?[]:{})}Sr(f,c,a),f=f[c]}return n}function cu(n){return Xi(ra(n))}function au(n,t,r){var e=-1,u=n.length;t<0&&(t=-t>u?0:u+t),r=r>u?u:r,r<0&&(r+=u),u=t>r?0:r-t>>>0,t>>>=0;for(var i=il(u);++e<u;)i[e]=n[e+t];return i}function lu(n,t){var r;return ys(n,function(n,e,u){return r=t(n,e,u),!r}),!!r}function su(n,t,r){
var e=0,u=null==n?e:n.length;if("number"==typeof t&&t===t&&u<=Tn){for(;e<u;){var i=e+u>>>1,o=n[i];null!==o&&!bc(o)&&(r?o<=t:o<t)?e=i+1:u=i}return u}return hu(n,t,La,r)}function hu(n,t,r,e){var u=0,i=null==n?0:n.length;if(0===i)return 0;t=r(t);for(var o=t!==t,f=null===t,c=bc(t),a=t===X;u<i;){var l=Nl((u+i)/2),s=r(n[l]),h=s!==X,p=null===s,_=s===s,v=bc(s);if(o)var g=e||_;else g=a?_&&(e||h):f?_&&h&&(e||!p):c?_&&h&&!p&&(e||!v):!p&&!v&&(e?s<=t:s<t);g?u=l+1:i=l}return Hl(i,Bn)}function pu(n,t){for(var r=-1,e=n.length,u=0,i=[];++r<e;){
var o=n[r],f=t?t(o):o;if(!r||!Gf(f,c)){var c=f;i[u++]=0===o?0:o}}return i}function _u(n){return"number"==typeof n?n:bc(n)?Cn:+n}function vu(n){if("string"==typeof n)return n;if(bh(n))return c(n,vu)+"";if(bc(n))return vs?vs.call(n):"";var t=n+"";return"0"==t&&1/n==-Sn?"-0":t}function gu(n,t,r){var e=-1,u=o,i=n.length,c=!0,a=[],l=a;if(r)c=!1,u=f;else if(i>=tn){var s=t?null:ks(n);if(s)return P(s);c=!1,u=S,l=new yr}else l=t?[]:a;n:for(;++e<i;){var h=n[e],p=t?t(h):h;if(h=r||0!==h?h:0,c&&p===p){for(var _=l.length;_--;)if(l[_]===p)continue n;
t&&l.push(p),a.push(h)}else u(l,p,r)||(l!==a&&l.push(p),a.push(h))}return a}function yu(n,t){return t=ku(t,n),n=Gi(n,t),null==n||delete n[no(jo(t))]}function du(n,t,r,e){return fu(n,t,r(_e(n,t)),e)}function bu(n,t,r,e){for(var u=n.length,i=e?u:-1;(e?i--:++i<u)&&t(n[i],i,n););return r?au(n,e?0:i,e?i+1:u):au(n,e?i+1:0,e?u:i)}function wu(n,t){var r=n;return r instanceof Ct&&(r=r.value()),l(t,function(n,t){return t.func.apply(t.thisArg,a([n],t.args))},r)}function mu(n,t,r){var e=n.length;if(e<2)return e?gu(n[0]):[];
for(var u=-1,i=il(e);++u<e;)for(var o=n[u],f=-1;++f<e;)f!=u&&(i[u]=Hr(i[u]||o,n[f],t,r));return gu(ee(i,1),t,r)}function xu(n,t,r){for(var e=-1,u=n.length,i=t.length,o={};++e<u;){r(o,n[e],e<i?t[e]:X)}return o}function ju(n){return Jf(n)?n:[]}function Au(n){return"function"==typeof n?n:La}function ku(n,t){return bh(n)?n:Bi(n,t)?[n]:Cs(Ec(n))}function Ou(n,t,r){var e=n.length;return r=r===X?e:r,!t&&r>=e?n:au(n,t,r)}function Iu(n,t){if(t)return n.slice();var r=n.length,e=zl?zl(r):new n.constructor(r);
return n.copy(e),e}function Ru(n){var t=new n.constructor(n.byteLength);return new Rl(t).set(new Rl(n)),t}function zu(n,t){return new n.constructor(t?Ru(n.buffer):n.buffer,n.byteOffset,n.byteLength)}function Eu(n){var t=new n.constructor(n.source,Nt.exec(n));return t.lastIndex=n.lastIndex,t}function Su(n){return _s?ll(_s.call(n)):{}}function Wu(n,t){return new n.constructor(t?Ru(n.buffer):n.buffer,n.byteOffset,n.length)}function Lu(n,t){if(n!==t){var r=n!==X,e=null===n,u=n===n,i=bc(n),o=t!==X,f=null===t,c=t===t,a=bc(t);
if(!f&&!a&&!i&&n>t||i&&o&&c&&!f&&!a||e&&o&&c||!r&&c||!u)return 1;if(!e&&!i&&!a&&n<t||a&&r&&u&&!e&&!i||f&&r&&u||!o&&u||!c)return-1}return 0}function Cu(n,t,r){for(var e=-1,u=n.criteria,i=t.criteria,o=u.length,f=r.length;++e<o;){var c=Lu(u[e],i[e]);if(c){if(e>=f)return c;return c*("desc"==r[e]?-1:1)}}return n.index-t.index}function Uu(n,t,r,e){for(var u=-1,i=n.length,o=r.length,f=-1,c=t.length,a=Gl(i-o,0),l=il(c+a),s=!e;++f<c;)l[f]=t[f];for(;++u<o;)(s||u<i)&&(l[r[u]]=n[u]);for(;a--;)l[f++]=n[u++];return l;
}function Bu(n,t,r,e){for(var u=-1,i=n.length,o=-1,f=r.length,c=-1,a=t.length,l=Gl(i-f,0),s=il(l+a),h=!e;++u<l;)s[u]=n[u];for(var p=u;++c<a;)s[p+c]=t[c];for(;++o<f;)(h||u<i)&&(s[p+r[o]]=n[u++]);return s}function Tu(n,t){var r=-1,e=n.length;for(t||(t=il(e));++r<e;)t[r]=n[r];return t}function $u(n,t,r,e){var u=!r;r||(r={});for(var i=-1,o=t.length;++i<o;){var f=t[i],c=e?e(r[f],n[f],f,r,n):X;c===X&&(c=n[f]),u?Br(r,f,c):Sr(r,f,c)}return r}function Du(n,t){return $u(n,Is(n),t)}function Mu(n,t){return $u(n,Rs(n),t);
}function Fu(n,r){return function(e,u){var i=bh(e)?t:Lr,o=r?r():{};return i(e,n,mi(u,2),o)}}function Nu(n){return uu(function(t,r){var e=-1,u=r.length,i=u>1?r[u-1]:X,o=u>2?r[2]:X;for(i=n.length>3&&"function"==typeof i?(u--,i):X,o&&Ui(r[0],r[1],o)&&(i=u<3?X:i,u=1),t=ll(t);++e<u;){var f=r[e];f&&n(t,f,e,i)}return t})}function Pu(n,t){return function(r,e){if(null==r)return r;if(!Hf(r))return n(r,e);for(var u=r.length,i=t?u:-1,o=ll(r);(t?i--:++i<u)&&e(o[i],i,o)!==!1;);return r}}function qu(n){return function(t,r,e){
for(var u=-1,i=ll(t),o=e(t),f=o.length;f--;){var c=o[n?f:++u];if(r(i[c],c,i)===!1)break}return t}}function Zu(n,t,r){function e(){return(this&&this!==re&&this instanceof e?i:n).apply(u?r:this,arguments)}var u=t&_n,i=Gu(n);return e}function Ku(n){return function(t){t=Ec(t);var r=T(t)?G(t):X,e=r?r[0]:t.charAt(0),u=r?Ou(r,1).join(""):t.slice(1);return e[n]()+u}}function Vu(n){return function(t){return l(Ra(ca(t).replace($r,"")),n,"")}}function Gu(n){return function(){var t=arguments;switch(t.length){
case 0:return new n;case 1:return new n(t[0]);case 2:return new n(t[0],t[1]);case 3:return new n(t[0],t[1],t[2]);case 4:return new n(t[0],t[1],t[2],t[3]);case 5:return new n(t[0],t[1],t[2],t[3],t[4]);case 6:return new n(t[0],t[1],t[2],t[3],t[4],t[5]);case 7:return new n(t[0],t[1],t[2],t[3],t[4],t[5],t[6])}var r=gs(n.prototype),e=n.apply(r,t);return fc(e)?e:r}}function Hu(t,r,e){function u(){for(var o=arguments.length,f=il(o),c=o,a=wi(u);c--;)f[c]=arguments[c];var l=o<3&&f[0]!==a&&f[o-1]!==a?[]:N(f,a);
return o-=l.length,o<e?oi(t,r,Qu,u.placeholder,X,f,l,X,X,e-o):n(this&&this!==re&&this instanceof u?i:t,this,f)}var i=Gu(t);return u}function Ju(n){return function(t,r,e){var u=ll(t);if(!Hf(t)){var i=mi(r,3);t=Pc(t),r=function(n){return i(u[n],n,u)}}var o=n(t,r,e);return o>-1?u[i?t[o]:o]:X}}function Yu(n){return gi(function(t){var r=t.length,e=r,u=Y.prototype.thru;for(n&&t.reverse();e--;){var i=t[e];if("function"!=typeof i)throw new pl(en);if(u&&!o&&"wrapper"==bi(i))var o=new Y([],!0)}for(e=o?e:r;++e<r;){
i=t[e];var f=bi(i),c="wrapper"==f?Os(i):X;o=c&&$i(c[0])&&c[1]==(mn|yn|bn|xn)&&!c[4].length&&1==c[9]?o[bi(c[0])].apply(o,c[3]):1==i.length&&$i(i)?o[f]():o.thru(i)}return function(){var n=arguments,e=n[0];if(o&&1==n.length&&bh(e))return o.plant(e).value();for(var u=0,i=r?t[u].apply(this,n):e;++u<r;)i=t[u].call(this,i);return i}})}function Qu(n,t,r,e,u,i,o,f,c,a){function l(){for(var y=arguments.length,d=il(y),b=y;b--;)d[b]=arguments[b];if(_)var w=wi(l),m=C(d,w);if(e&&(d=Uu(d,e,u,_)),i&&(d=Bu(d,i,o,_)),
y-=m,_&&y<a){return oi(n,t,Qu,l.placeholder,r,d,N(d,w),f,c,a-y)}var x=h?r:this,j=p?x[n]:n;return y=d.length,f?d=Hi(d,f):v&&y>1&&d.reverse(),s&&c<y&&(d.length=c),this&&this!==re&&this instanceof l&&(j=g||Gu(j)),j.apply(x,d)}var s=t&mn,h=t&_n,p=t&vn,_=t&(yn|dn),v=t&jn,g=p?X:Gu(n);return l}function Xu(n,t){return function(r,e){return Oe(r,n,t(e),{})}}function ni(n,t){return function(r,e){var u;if(r===X&&e===X)return t;if(r!==X&&(u=r),e!==X){if(u===X)return e;"string"==typeof r||"string"==typeof e?(r=vu(r),
e=vu(e)):(r=_u(r),e=_u(e)),u=n(r,e)}return u}}function ti(t){return gi(function(r){return r=c(r,z(mi())),uu(function(e){var u=this;return t(r,function(t){return n(t,u,e)})})})}function ri(n,t){t=t===X?" ":vu(t);var r=t.length;if(r<2)return r?eu(t,n):t;var e=eu(t,Fl(n/V(t)));return T(t)?Ou(G(e),0,n).join(""):e.slice(0,n)}function ei(t,r,e,u){function i(){for(var r=-1,c=arguments.length,a=-1,l=u.length,s=il(l+c),h=this&&this!==re&&this instanceof i?f:t;++a<l;)s[a]=u[a];for(;c--;)s[a++]=arguments[++r];
return n(h,o?e:this,s)}var o=r&_n,f=Gu(t);return i}function ui(n){return function(t,r,e){return e&&"number"!=typeof e&&Ui(t,r,e)&&(r=e=X),t=Ac(t),r===X?(r=t,t=0):r=Ac(r),e=e===X?t<r?1:-1:Ac(e),ru(t,r,e,n)}}function ii(n){return function(t,r){return"string"==typeof t&&"string"==typeof r||(t=Ic(t),r=Ic(r)),n(t,r)}}function oi(n,t,r,e,u,i,o,f,c,a){var l=t&yn,s=l?o:X,h=l?X:o,p=l?i:X,_=l?X:i;t|=l?bn:wn,t&=~(l?wn:bn),t&gn||(t&=~(_n|vn));var v=[n,t,u,p,s,_,h,f,c,a],g=r.apply(X,v);return $i(n)&&Ss(g,v),g.placeholder=e,
Yi(g,n,t)}function fi(n){var t=al[n];return function(n,r){if(n=Ic(n),r=null==r?0:Hl(kc(r),292),r&&Zl(n)){var e=(Ec(n)+"e").split("e");return e=(Ec(t(e[0]+"e"+(+e[1]+r)))+"e").split("e"),+(e[0]+"e"+(+e[1]-r))}return t(n)}}function ci(n){return function(t){var r=zs(t);return r==Gn?M(t):r==tt?q(t):I(t,n(t))}}function ai(n,t,r,e,u,i,o,f){var c=t&vn;if(!c&&"function"!=typeof n)throw new pl(en);var a=e?e.length:0;if(a||(t&=~(bn|wn),e=u=X),o=o===X?o:Gl(kc(o),0),f=f===X?f:kc(f),a-=u?u.length:0,t&wn){var l=e,s=u;
e=u=X}var h=c?X:Os(n),p=[n,t,r,e,u,l,s,i,o,f];if(h&&qi(p,h),n=p[0],t=p[1],r=p[2],e=p[3],u=p[4],f=p[9]=p[9]===X?c?0:n.length:Gl(p[9]-a,0),!f&&t&(yn|dn)&&(t&=~(yn|dn)),t&&t!=_n)_=t==yn||t==dn?Hu(n,t,f):t!=bn&&t!=(_n|bn)||u.length?Qu.apply(X,p):ei(n,t,r,e);else var _=Zu(n,t,r);return Yi((h?ms:Ss)(_,p),n,t)}function li(n,t,r,e){return n===X||Gf(n,gl[r])&&!bl.call(e,r)?t:n}function si(n,t,r,e,u,i){return fc(n)&&fc(t)&&(i.set(t,n),Ke(n,t,X,si,i),i.delete(t)),n}function hi(n){return gc(n)?X:n}function pi(n,t,r,e,u,i){
var o=r&hn,f=n.length,c=t.length;if(f!=c&&!(o&&c>f))return!1;var a=i.get(n),l=i.get(t);if(a&&l)return a==t&&l==n;var s=-1,p=!0,_=r&pn?new yr:X;for(i.set(n,t),i.set(t,n);++s<f;){var v=n[s],g=t[s];if(e)var y=o?e(g,v,s,t,n,i):e(v,g,s,n,t,i);if(y!==X){if(y)continue;p=!1;break}if(_){if(!h(t,function(n,t){if(!S(_,t)&&(v===n||u(v,n,r,e,i)))return _.push(t)})){p=!1;break}}else if(v!==g&&!u(v,g,r,e,i)){p=!1;break}}return i.delete(n),i.delete(t),p}function _i(n,t,r,e,u,i,o){switch(r){case ct:if(n.byteLength!=t.byteLength||n.byteOffset!=t.byteOffset)return!1;
n=n.buffer,t=t.buffer;case ft:return!(n.byteLength!=t.byteLength||!i(new Rl(n),new Rl(t)));case Nn:case Pn:case Hn:return Gf(+n,+t);case Zn:return n.name==t.name&&n.message==t.message;case nt:case rt:return n==t+"";case Gn:var f=M;case tt:var c=e&hn;if(f||(f=P),n.size!=t.size&&!c)return!1;var a=o.get(n);if(a)return a==t;e|=pn,o.set(n,t);var l=pi(f(n),f(t),e,u,i,o);return o.delete(n),l;case et:if(_s)return _s.call(n)==_s.call(t)}return!1}function vi(n,t,r,e,u,i){var o=r&hn,f=yi(n),c=f.length;if(c!=yi(t).length&&!o)return!1;
for(var a=c;a--;){var l=f[a];if(!(o?l in t:bl.call(t,l)))return!1}var s=i.get(n),h=i.get(t);if(s&&h)return s==t&&h==n;var p=!0;i.set(n,t),i.set(t,n);for(var _=o;++a<c;){l=f[a];var v=n[l],g=t[l];if(e)var y=o?e(g,v,l,t,n,i):e(v,g,l,n,t,i);if(!(y===X?v===g||u(v,g,r,e,i):y)){p=!1;break}_||(_="constructor"==l)}if(p&&!_){var d=n.constructor,b=t.constructor;d!=b&&"constructor"in n&&"constructor"in t&&!("function"==typeof d&&d instanceof d&&"function"==typeof b&&b instanceof b)&&(p=!1)}return i.delete(n),
i.delete(t),p}function gi(n){return Ls(Vi(n,X,_o),n+"")}function yi(n){return de(n,Pc,Is)}function di(n){return de(n,qc,Rs)}function bi(n){for(var t=n.name+"",r=fs[t],e=bl.call(fs,t)?r.length:0;e--;){var u=r[e],i=u.func;if(null==i||i==n)return u.name}return t}function wi(n){return(bl.call(Z,"placeholder")?Z:n).placeholder}function mi(){var n=Z.iteratee||Ca;return n=n===Ca?De:n,arguments.length?n(arguments[0],arguments[1]):n}function xi(n,t){var r=n.__data__;return Ti(t)?r["string"==typeof t?"string":"hash"]:r.map;
}function ji(n){for(var t=Pc(n),r=t.length;r--;){var e=t[r],u=n[e];t[r]=[e,u,Fi(u)]}return t}function Ai(n,t){var r=B(n,t);return Ue(r)?r:X}function ki(n){var t=bl.call(n,Bl),r=n[Bl];try{n[Bl]=X;var e=!0}catch(n){}var u=xl.call(n);return e&&(t?n[Bl]=r:delete n[Bl]),u}function Oi(n,t,r){for(var e=-1,u=r.length;++e<u;){var i=r[e],o=i.size;switch(i.type){case"drop":n+=o;break;case"dropRight":t-=o;break;case"take":t=Hl(t,n+o);break;case"takeRight":n=Gl(n,t-o)}}return{start:n,end:t}}function Ii(n){var t=n.match(Bt);
return t?t[1].split(Tt):[]}function Ri(n,t,r){t=ku(t,n);for(var e=-1,u=t.length,i=!1;++e<u;){var o=no(t[e]);if(!(i=null!=n&&r(n,o)))break;n=n[o]}return i||++e!=u?i:(u=null==n?0:n.length,!!u&&oc(u)&&Ci(o,u)&&(bh(n)||dh(n)))}function zi(n){var t=n.length,r=new n.constructor(t);return t&&"string"==typeof n[0]&&bl.call(n,"index")&&(r.index=n.index,r.input=n.input),r}function Ei(n){return"function"!=typeof n.constructor||Mi(n)?{}:gs(El(n))}function Si(n,t,r){var e=n.constructor;switch(t){case ft:return Ru(n);
case Nn:case Pn:return new e(+n);case ct:return zu(n,r);case at:case lt:case st:case ht:case pt:case _t:case vt:case gt:case yt:return Wu(n,r);case Gn:return new e;case Hn:case rt:return new e(n);case nt:return Eu(n);case tt:return new e;case et:return Su(n)}}function Wi(n,t){var r=t.length;if(!r)return n;var e=r-1;return t[e]=(r>1?"& ":"")+t[e],t=t.join(r>2?", ":" "),n.replace(Ut,"{\n/* [wrapped with "+t+"] */\n")}function Li(n){return bh(n)||dh(n)||!!(Cl&&n&&n[Cl])}function Ci(n,t){var r=typeof n;
return t=null==t?Wn:t,!!t&&("number"==r||"symbol"!=r&&Vt.test(n))&&n>-1&&n%1==0&&n<t}function Ui(n,t,r){if(!fc(r))return!1;var e=typeof t;return!!("number"==e?Hf(r)&&Ci(t,r.length):"string"==e&&t in r)&&Gf(r[t],n)}function Bi(n,t){if(bh(n))return!1;var r=typeof n;return!("number"!=r&&"symbol"!=r&&"boolean"!=r&&null!=n&&!bc(n))||(zt.test(n)||!Rt.test(n)||null!=t&&n in ll(t))}function Ti(n){var t=typeof n;return"string"==t||"number"==t||"symbol"==t||"boolean"==t?"__proto__"!==n:null===n}function $i(n){
var t=bi(n),r=Z[t];if("function"!=typeof r||!(t in Ct.prototype))return!1;if(n===r)return!0;var e=Os(r);return!!e&&n===e[0]}function Di(n){return!!ml&&ml in n}function Mi(n){var t=n&&n.constructor;return n===("function"==typeof t&&t.prototype||gl)}function Fi(n){return n===n&&!fc(n)}function Ni(n,t){return function(r){return null!=r&&(r[n]===t&&(t!==X||n in ll(r)))}}function Pi(n){var t=Cf(n,function(n){return r.size===fn&&r.clear(),n}),r=t.cache;return t}function qi(n,t){var r=n[1],e=t[1],u=r|e,i=u<(_n|vn|mn),o=e==mn&&r==yn||e==mn&&r==xn&&n[7].length<=t[8]||e==(mn|xn)&&t[7].length<=t[8]&&r==yn;
if(!i&&!o)return n;e&_n&&(n[2]=t[2],u|=r&_n?0:gn);var f=t[3];if(f){var c=n[3];n[3]=c?Uu(c,f,t[4]):f,n[4]=c?N(n[3],cn):t[4]}return f=t[5],f&&(c=n[5],n[5]=c?Bu(c,f,t[6]):f,n[6]=c?N(n[5],cn):t[6]),f=t[7],f&&(n[7]=f),e&mn&&(n[8]=null==n[8]?t[8]:Hl(n[8],t[8])),null==n[9]&&(n[9]=t[9]),n[0]=t[0],n[1]=u,n}function Zi(n){var t=[];if(null!=n)for(var r in ll(n))t.push(r);return t}function Ki(n){return xl.call(n)}function Vi(t,r,e){return r=Gl(r===X?t.length-1:r,0),function(){for(var u=arguments,i=-1,o=Gl(u.length-r,0),f=il(o);++i<o;)f[i]=u[r+i];
i=-1;for(var c=il(r+1);++i<r;)c[i]=u[i];return c[r]=e(f),n(t,this,c)}}function Gi(n,t){return t.length<2?n:_e(n,au(t,0,-1))}function Hi(n,t){for(var r=n.length,e=Hl(t.length,r),u=Tu(n);e--;){var i=t[e];n[e]=Ci(i,r)?u[i]:X}return n}function Ji(n,t){if(("constructor"!==t||"function"!=typeof n[t])&&"__proto__"!=t)return n[t]}function Yi(n,t,r){var e=t+"";return Ls(n,Wi(e,ro(Ii(e),r)))}function Qi(n){var t=0,r=0;return function(){var e=Jl(),u=In-(e-r);if(r=e,u>0){if(++t>=On)return arguments[0]}else t=0;
return n.apply(X,arguments)}}function Xi(n,t){var r=-1,e=n.length,u=e-1;for(t=t===X?e:t;++r<t;){var i=tu(r,u),o=n[i];n[i]=n[r],n[r]=o}return n.length=t,n}function no(n){if("string"==typeof n||bc(n))return n;var t=n+"";return"0"==t&&1/n==-Sn?"-0":t}function to(n){if(null!=n){try{return dl.call(n)}catch(n){}try{return n+""}catch(n){}}return""}function ro(n,t){return r($n,function(r){var e="_."+r[0];t&r[1]&&!o(n,e)&&n.push(e)}),n.sort()}function eo(n){if(n instanceof Ct)return n.clone();var t=new Y(n.__wrapped__,n.__chain__);
return t.__actions__=Tu(n.__actions__),t.__index__=n.__index__,t.__values__=n.__values__,t}function uo(n,t,r){t=(r?Ui(n,t,r):t===X)?1:Gl(kc(t),0);var e=null==n?0:n.length;if(!e||t<1)return[];for(var u=0,i=0,o=il(Fl(e/t));u<e;)o[i++]=au(n,u,u+=t);return o}function io(n){for(var t=-1,r=null==n?0:n.length,e=0,u=[];++t<r;){var i=n[t];i&&(u[e++]=i)}return u}function oo(){var n=arguments.length;if(!n)return[];for(var t=il(n-1),r=arguments[0],e=n;e--;)t[e-1]=arguments[e];return a(bh(r)?Tu(r):[r],ee(t,1));
}function fo(n,t,r){var e=null==n?0:n.length;return e?(t=r||t===X?1:kc(t),au(n,t<0?0:t,e)):[]}function co(n,t,r){var e=null==n?0:n.length;return e?(t=r||t===X?1:kc(t),t=e-t,au(n,0,t<0?0:t)):[]}function ao(n,t){return n&&n.length?bu(n,mi(t,3),!0,!0):[]}function lo(n,t){return n&&n.length?bu(n,mi(t,3),!0):[]}function so(n,t,r,e){var u=null==n?0:n.length;return u?(r&&"number"!=typeof r&&Ui(n,t,r)&&(r=0,e=u),ne(n,t,r,e)):[]}function ho(n,t,r){var e=null==n?0:n.length;if(!e)return-1;var u=null==r?0:kc(r);
return u<0&&(u=Gl(e+u,0)),g(n,mi(t,3),u)}function po(n,t,r){var e=null==n?0:n.length;if(!e)return-1;var u=e-1;return r!==X&&(u=kc(r),u=r<0?Gl(e+u,0):Hl(u,e-1)),g(n,mi(t,3),u,!0)}function _o(n){return(null==n?0:n.length)?ee(n,1):[]}function vo(n){return(null==n?0:n.length)?ee(n,Sn):[]}function go(n,t){return(null==n?0:n.length)?(t=t===X?1:kc(t),ee(n,t)):[]}function yo(n){for(var t=-1,r=null==n?0:n.length,e={};++t<r;){var u=n[t];e[u[0]]=u[1]}return e}function bo(n){return n&&n.length?n[0]:X}function wo(n,t,r){
var e=null==n?0:n.length;if(!e)return-1;var u=null==r?0:kc(r);return u<0&&(u=Gl(e+u,0)),y(n,t,u)}function mo(n){return(null==n?0:n.length)?au(n,0,-1):[]}function xo(n,t){return null==n?"":Kl.call(n,t)}function jo(n){var t=null==n?0:n.length;return t?n[t-1]:X}function Ao(n,t,r){var e=null==n?0:n.length;if(!e)return-1;var u=e;return r!==X&&(u=kc(r),u=u<0?Gl(e+u,0):Hl(u,e-1)),t===t?K(n,t,u):g(n,b,u,!0)}function ko(n,t){return n&&n.length?Ge(n,kc(t)):X}function Oo(n,t){return n&&n.length&&t&&t.length?Xe(n,t):n;
}function Io(n,t,r){return n&&n.length&&t&&t.length?Xe(n,t,mi(r,2)):n}function Ro(n,t,r){return n&&n.length&&t&&t.length?Xe(n,t,X,r):n}function zo(n,t){var r=[];if(!n||!n.length)return r;var e=-1,u=[],i=n.length;for(t=mi(t,3);++e<i;){var o=n[e];t(o,e,n)&&(r.push(o),u.push(e))}return nu(n,u),r}function Eo(n){return null==n?n:Xl.call(n)}function So(n,t,r){var e=null==n?0:n.length;return e?(r&&"number"!=typeof r&&Ui(n,t,r)?(t=0,r=e):(t=null==t?0:kc(t),r=r===X?e:kc(r)),au(n,t,r)):[]}function Wo(n,t){
return su(n,t)}function Lo(n,t,r){return hu(n,t,mi(r,2))}function Co(n,t){var r=null==n?0:n.length;if(r){var e=su(n,t);if(e<r&&Gf(n[e],t))return e}return-1}function Uo(n,t){return su(n,t,!0)}function Bo(n,t,r){return hu(n,t,mi(r,2),!0)}function To(n,t){if(null==n?0:n.length){var r=su(n,t,!0)-1;if(Gf(n[r],t))return r}return-1}function $o(n){return n&&n.length?pu(n):[]}function Do(n,t){return n&&n.length?pu(n,mi(t,2)):[]}function Mo(n){var t=null==n?0:n.length;return t?au(n,1,t):[]}function Fo(n,t,r){
return n&&n.length?(t=r||t===X?1:kc(t),au(n,0,t<0?0:t)):[]}function No(n,t,r){var e=null==n?0:n.length;return e?(t=r||t===X?1:kc(t),t=e-t,au(n,t<0?0:t,e)):[]}function Po(n,t){return n&&n.length?bu(n,mi(t,3),!1,!0):[]}function qo(n,t){return n&&n.length?bu(n,mi(t,3)):[]}function Zo(n){return n&&n.length?gu(n):[]}function Ko(n,t){return n&&n.length?gu(n,mi(t,2)):[]}function Vo(n,t){return t="function"==typeof t?t:X,n&&n.length?gu(n,X,t):[]}function Go(n){if(!n||!n.length)return[];var t=0;return n=i(n,function(n){
if(Jf(n))return t=Gl(n.length,t),!0}),O(t,function(t){return c(n,m(t))})}function Ho(t,r){if(!t||!t.length)return[];var e=Go(t);return null==r?e:c(e,function(t){return n(r,X,t)})}function Jo(n,t){return xu(n||[],t||[],Sr)}function Yo(n,t){return xu(n||[],t||[],fu)}function Qo(n){var t=Z(n);return t.__chain__=!0,t}function Xo(n,t){return t(n),n}function nf(n,t){return t(n)}function tf(){return Qo(this)}function rf(){return new Y(this.value(),this.__chain__)}function ef(){this.__values__===X&&(this.__values__=jc(this.value()));
var n=this.__index__>=this.__values__.length;return{done:n,value:n?X:this.__values__[this.__index__++]}}function uf(){return this}function of(n){for(var t,r=this;r instanceof J;){var e=eo(r);e.__index__=0,e.__values__=X,t?u.__wrapped__=e:t=e;var u=e;r=r.__wrapped__}return u.__wrapped__=n,t}function ff(){var n=this.__wrapped__;if(n instanceof Ct){var t=n;return this.__actions__.length&&(t=new Ct(this)),t=t.reverse(),t.__actions__.push({func:nf,args:[Eo],thisArg:X}),new Y(t,this.__chain__)}return this.thru(Eo);
}function cf(){return wu(this.__wrapped__,this.__actions__)}function af(n,t,r){var e=bh(n)?u:Jr;return r&&Ui(n,t,r)&&(t=X),e(n,mi(t,3))}function lf(n,t){return(bh(n)?i:te)(n,mi(t,3))}function sf(n,t){return ee(yf(n,t),1)}function hf(n,t){return ee(yf(n,t),Sn)}function pf(n,t,r){return r=r===X?1:kc(r),ee(yf(n,t),r)}function _f(n,t){return(bh(n)?r:ys)(n,mi(t,3))}function vf(n,t){return(bh(n)?e:ds)(n,mi(t,3))}function gf(n,t,r,e){n=Hf(n)?n:ra(n),r=r&&!e?kc(r):0;var u=n.length;return r<0&&(r=Gl(u+r,0)),
dc(n)?r<=u&&n.indexOf(t,r)>-1:!!u&&y(n,t,r)>-1}function yf(n,t){return(bh(n)?c:Pe)(n,mi(t,3))}function df(n,t,r,e){return null==n?[]:(bh(t)||(t=null==t?[]:[t]),r=e?X:r,bh(r)||(r=null==r?[]:[r]),He(n,t,r))}function bf(n,t,r){var e=bh(n)?l:j,u=arguments.length<3;return e(n,mi(t,4),r,u,ys)}function wf(n,t,r){var e=bh(n)?s:j,u=arguments.length<3;return e(n,mi(t,4),r,u,ds)}function mf(n,t){return(bh(n)?i:te)(n,Uf(mi(t,3)))}function xf(n){return(bh(n)?Ir:iu)(n)}function jf(n,t,r){return t=(r?Ui(n,t,r):t===X)?1:kc(t),
(bh(n)?Rr:ou)(n,t)}function Af(n){return(bh(n)?zr:cu)(n)}function kf(n){if(null==n)return 0;if(Hf(n))return dc(n)?V(n):n.length;var t=zs(n);return t==Gn||t==tt?n.size:Me(n).length}function Of(n,t,r){var e=bh(n)?h:lu;return r&&Ui(n,t,r)&&(t=X),e(n,mi(t,3))}function If(n,t){if("function"!=typeof t)throw new pl(en);return n=kc(n),function(){if(--n<1)return t.apply(this,arguments)}}function Rf(n,t,r){return t=r?X:t,t=n&&null==t?n.length:t,ai(n,mn,X,X,X,X,t)}function zf(n,t){var r;if("function"!=typeof t)throw new pl(en);
return n=kc(n),function(){return--n>0&&(r=t.apply(this,arguments)),n<=1&&(t=X),r}}function Ef(n,t,r){t=r?X:t;var e=ai(n,yn,X,X,X,X,X,t);return e.placeholder=Ef.placeholder,e}function Sf(n,t,r){t=r?X:t;var e=ai(n,dn,X,X,X,X,X,t);return e.placeholder=Sf.placeholder,e}function Wf(n,t,r){function e(t){var r=h,e=p;return h=p=X,d=t,v=n.apply(e,r)}function u(n){return d=n,g=Ws(f,t),b?e(n):v}function i(n){var r=n-y,e=n-d,u=t-r;return w?Hl(u,_-e):u}function o(n){var r=n-y,e=n-d;return y===X||r>=t||r<0||w&&e>=_;
}function f(){var n=fh();return o(n)?c(n):(g=Ws(f,i(n)),X)}function c(n){return g=X,m&&h?e(n):(h=p=X,v)}function a(){g!==X&&As(g),d=0,h=y=p=g=X}function l(){return g===X?v:c(fh())}function s(){var n=fh(),r=o(n);if(h=arguments,p=this,y=n,r){if(g===X)return u(y);if(w)return As(g),g=Ws(f,t),e(y)}return g===X&&(g=Ws(f,t)),v}var h,p,_,v,g,y,d=0,b=!1,w=!1,m=!0;if("function"!=typeof n)throw new pl(en);return t=Ic(t)||0,fc(r)&&(b=!!r.leading,w="maxWait"in r,_=w?Gl(Ic(r.maxWait)||0,t):_,m="trailing"in r?!!r.trailing:m),
s.cancel=a,s.flush=l,s}function Lf(n){return ai(n,jn)}function Cf(n,t){if("function"!=typeof n||null!=t&&"function"!=typeof t)throw new pl(en);var r=function(){var e=arguments,u=t?t.apply(this,e):e[0],i=r.cache;if(i.has(u))return i.get(u);var o=n.apply(this,e);return r.cache=i.set(u,o)||i,o};return r.cache=new(Cf.Cache||sr),r}function Uf(n){if("function"!=typeof n)throw new pl(en);return function(){var t=arguments;switch(t.length){case 0:return!n.call(this);case 1:return!n.call(this,t[0]);case 2:
return!n.call(this,t[0],t[1]);case 3:return!n.call(this,t[0],t[1],t[2])}return!n.apply(this,t)}}function Bf(n){return zf(2,n)}function Tf(n,t){if("function"!=typeof n)throw new pl(en);return t=t===X?t:kc(t),uu(n,t)}function $f(t,r){if("function"!=typeof t)throw new pl(en);return r=null==r?0:Gl(kc(r),0),uu(function(e){var u=e[r],i=Ou(e,0,r);return u&&a(i,u),n(t,this,i)})}function Df(n,t,r){var e=!0,u=!0;if("function"!=typeof n)throw new pl(en);return fc(r)&&(e="leading"in r?!!r.leading:e,u="trailing"in r?!!r.trailing:u),
Wf(n,t,{leading:e,maxWait:t,trailing:u})}function Mf(n){return Rf(n,1)}function Ff(n,t){return ph(Au(t),n)}function Nf(){if(!arguments.length)return[];var n=arguments[0];return bh(n)?n:[n]}function Pf(n){return Fr(n,sn)}function qf(n,t){return t="function"==typeof t?t:X,Fr(n,sn,t)}function Zf(n){return Fr(n,an|sn)}function Kf(n,t){return t="function"==typeof t?t:X,Fr(n,an|sn,t)}function Vf(n,t){return null==t||Pr(n,t,Pc(t))}function Gf(n,t){return n===t||n!==n&&t!==t}function Hf(n){return null!=n&&oc(n.length)&&!uc(n);
}function Jf(n){return cc(n)&&Hf(n)}function Yf(n){return n===!0||n===!1||cc(n)&&we(n)==Nn}function Qf(n){return cc(n)&&1===n.nodeType&&!gc(n)}function Xf(n){if(null==n)return!0;if(Hf(n)&&(bh(n)||"string"==typeof n||"function"==typeof n.splice||mh(n)||Oh(n)||dh(n)))return!n.length;var t=zs(n);if(t==Gn||t==tt)return!n.size;if(Mi(n))return!Me(n).length;for(var r in n)if(bl.call(n,r))return!1;return!0}function nc(n,t){return Se(n,t)}function tc(n,t,r){r="function"==typeof r?r:X;var e=r?r(n,t):X;return e===X?Se(n,t,X,r):!!e;
}function rc(n){if(!cc(n))return!1;var t=we(n);return t==Zn||t==qn||"string"==typeof n.message&&"string"==typeof n.name&&!gc(n)}function ec(n){return"number"==typeof n&&Zl(n)}function uc(n){if(!fc(n))return!1;var t=we(n);return t==Kn||t==Vn||t==Fn||t==Xn}function ic(n){return"number"==typeof n&&n==kc(n)}function oc(n){return"number"==typeof n&&n>-1&&n%1==0&&n<=Wn}function fc(n){var t=typeof n;return null!=n&&("object"==t||"function"==t)}function cc(n){return null!=n&&"object"==typeof n}function ac(n,t){
return n===t||Ce(n,t,ji(t))}function lc(n,t,r){return r="function"==typeof r?r:X,Ce(n,t,ji(t),r)}function sc(n){return vc(n)&&n!=+n}function hc(n){if(Es(n))throw new fl(rn);return Ue(n)}function pc(n){return null===n}function _c(n){return null==n}function vc(n){return"number"==typeof n||cc(n)&&we(n)==Hn}function gc(n){if(!cc(n)||we(n)!=Yn)return!1;var t=El(n);if(null===t)return!0;var r=bl.call(t,"constructor")&&t.constructor;return"function"==typeof r&&r instanceof r&&dl.call(r)==jl}function yc(n){
return ic(n)&&n>=-Wn&&n<=Wn}function dc(n){return"string"==typeof n||!bh(n)&&cc(n)&&we(n)==rt}function bc(n){return"symbol"==typeof n||cc(n)&&we(n)==et}function wc(n){return n===X}function mc(n){return cc(n)&&zs(n)==it}function xc(n){return cc(n)&&we(n)==ot}function jc(n){if(!n)return[];if(Hf(n))return dc(n)?G(n):Tu(n);if(Ul&&n[Ul])return D(n[Ul]());var t=zs(n);return(t==Gn?M:t==tt?P:ra)(n)}function Ac(n){if(!n)return 0===n?n:0;if(n=Ic(n),n===Sn||n===-Sn){return(n<0?-1:1)*Ln}return n===n?n:0}function kc(n){
var t=Ac(n),r=t%1;return t===t?r?t-r:t:0}function Oc(n){return n?Mr(kc(n),0,Un):0}function Ic(n){if("number"==typeof n)return n;if(bc(n))return Cn;if(fc(n)){var t="function"==typeof n.valueOf?n.valueOf():n;n=fc(t)?t+"":t}if("string"!=typeof n)return 0===n?n:+n;n=R(n);var r=qt.test(n);return r||Kt.test(n)?Xr(n.slice(2),r?2:8):Pt.test(n)?Cn:+n}function Rc(n){return $u(n,qc(n))}function zc(n){return n?Mr(kc(n),-Wn,Wn):0===n?n:0}function Ec(n){return null==n?"":vu(n)}function Sc(n,t){var r=gs(n);return null==t?r:Cr(r,t);
}function Wc(n,t){return v(n,mi(t,3),ue)}function Lc(n,t){return v(n,mi(t,3),oe)}function Cc(n,t){return null==n?n:bs(n,mi(t,3),qc)}function Uc(n,t){return null==n?n:ws(n,mi(t,3),qc)}function Bc(n,t){return n&&ue(n,mi(t,3))}function Tc(n,t){return n&&oe(n,mi(t,3))}function $c(n){return null==n?[]:fe(n,Pc(n))}function Dc(n){return null==n?[]:fe(n,qc(n))}function Mc(n,t,r){var e=null==n?X:_e(n,t);return e===X?r:e}function Fc(n,t){return null!=n&&Ri(n,t,xe)}function Nc(n,t){return null!=n&&Ri(n,t,je);
}function Pc(n){return Hf(n)?Or(n):Me(n)}function qc(n){return Hf(n)?Or(n,!0):Fe(n)}function Zc(n,t){var r={};return t=mi(t,3),ue(n,function(n,e,u){Br(r,t(n,e,u),n)}),r}function Kc(n,t){var r={};return t=mi(t,3),ue(n,function(n,e,u){Br(r,e,t(n,e,u))}),r}function Vc(n,t){return Gc(n,Uf(mi(t)))}function Gc(n,t){if(null==n)return{};var r=c(di(n),function(n){return[n]});return t=mi(t),Ye(n,r,function(n,r){return t(n,r[0])})}function Hc(n,t,r){t=ku(t,n);var e=-1,u=t.length;for(u||(u=1,n=X);++e<u;){var i=null==n?X:n[no(t[e])];
i===X&&(e=u,i=r),n=uc(i)?i.call(n):i}return n}function Jc(n,t,r){return null==n?n:fu(n,t,r)}function Yc(n,t,r,e){return e="function"==typeof e?e:X,null==n?n:fu(n,t,r,e)}function Qc(n,t,e){var u=bh(n),i=u||mh(n)||Oh(n);if(t=mi(t,4),null==e){var o=n&&n.constructor;e=i?u?new o:[]:fc(n)&&uc(o)?gs(El(n)):{}}return(i?r:ue)(n,function(n,r,u){return t(e,n,r,u)}),e}function Xc(n,t){return null==n||yu(n,t)}function na(n,t,r){return null==n?n:du(n,t,Au(r))}function ta(n,t,r,e){return e="function"==typeof e?e:X,
null==n?n:du(n,t,Au(r),e)}function ra(n){return null==n?[]:E(n,Pc(n))}function ea(n){return null==n?[]:E(n,qc(n))}function ua(n,t,r){return r===X&&(r=t,t=X),r!==X&&(r=Ic(r),r=r===r?r:0),t!==X&&(t=Ic(t),t=t===t?t:0),Mr(Ic(n),t,r)}function ia(n,t,r){return t=Ac(t),r===X?(r=t,t=0):r=Ac(r),n=Ic(n),Ae(n,t,r)}function oa(n,t,r){if(r&&"boolean"!=typeof r&&Ui(n,t,r)&&(t=r=X),r===X&&("boolean"==typeof t?(r=t,t=X):"boolean"==typeof n&&(r=n,n=X)),n===X&&t===X?(n=0,t=1):(n=Ac(n),t===X?(t=n,n=0):t=Ac(t)),n>t){
var e=n;n=t,t=e}if(r||n%1||t%1){var u=Ql();return Hl(n+u*(t-n+Qr("1e-"+((u+"").length-1))),t)}return tu(n,t)}function fa(n){return Qh(Ec(n).toLowerCase())}function ca(n){return n=Ec(n),n&&n.replace(Gt,ve).replace(Dr,"")}function aa(n,t,r){n=Ec(n),t=vu(t);var e=n.length;r=r===X?e:Mr(kc(r),0,e);var u=r;return r-=t.length,r>=0&&n.slice(r,u)==t}function la(n){return n=Ec(n),n&&At.test(n)?n.replace(xt,ge):n}function sa(n){return n=Ec(n),n&&Wt.test(n)?n.replace(St,"\\$&"):n}function ha(n,t,r){n=Ec(n),t=kc(t);
var e=t?V(n):0;if(!t||e>=t)return n;var u=(t-e)/2;return ri(Nl(u),r)+n+ri(Fl(u),r)}function pa(n,t,r){n=Ec(n),t=kc(t);var e=t?V(n):0;return t&&e<t?n+ri(t-e,r):n}function _a(n,t,r){n=Ec(n),t=kc(t);var e=t?V(n):0;return t&&e<t?ri(t-e,r)+n:n}function va(n,t,r){return r||null==t?t=0:t&&(t=+t),Yl(Ec(n).replace(Lt,""),t||0)}function ga(n,t,r){return t=(r?Ui(n,t,r):t===X)?1:kc(t),eu(Ec(n),t)}function ya(){var n=arguments,t=Ec(n[0]);return n.length<3?t:t.replace(n[1],n[2])}function da(n,t,r){return r&&"number"!=typeof r&&Ui(n,t,r)&&(t=r=X),
(r=r===X?Un:r>>>0)?(n=Ec(n),n&&("string"==typeof t||null!=t&&!Ah(t))&&(t=vu(t),!t&&T(n))?Ou(G(n),0,r):n.split(t,r)):[]}function ba(n,t,r){return n=Ec(n),r=null==r?0:Mr(kc(r),0,n.length),t=vu(t),n.slice(r,r+t.length)==t}function wa(n,t,r){var e=Z.templateSettings;r&&Ui(n,t,r)&&(t=X),n=Ec(n),t=Sh({},t,e,li);var u,i,o=Sh({},t.imports,e.imports,li),f=Pc(o),c=E(o,f),a=0,l=t.interpolate||Ht,s="__p += '",h=sl((t.escape||Ht).source+"|"+l.source+"|"+(l===It?Ft:Ht).source+"|"+(t.evaluate||Ht).source+"|$","g"),p="//# sourceURL="+(bl.call(t,"sourceURL")?(t.sourceURL+"").replace(/\s/g," "):"lodash.templateSources["+ ++Zr+"]")+"\n";
n.replace(h,function(t,r,e,o,f,c){return e||(e=o),s+=n.slice(a,c).replace(Jt,U),r&&(u=!0,s+="' +\n__e("+r+") +\n'"),f&&(i=!0,s+="';\n"+f+";\n__p += '"),e&&(s+="' +\n((__t = ("+e+")) == null ? '' : __t) +\n'"),a=c+t.length,t}),s+="';\n";var _=bl.call(t,"variable")&&t.variable;if(_){if(Dt.test(_))throw new fl(un)}else s="with (obj) {\n"+s+"\n}\n";s=(i?s.replace(dt,""):s).replace(bt,"$1").replace(wt,"$1;"),s="function("+(_||"obj")+") {\n"+(_?"":"obj || (obj = {});\n")+"var __t, __p = ''"+(u?", __e = _.escape":"")+(i?", __j = Array.prototype.join;\nfunction print() { __p += __j.call(arguments, '') }\n":";\n")+s+"return __p\n}";
var v=Xh(function(){return cl(f,p+"return "+s).apply(X,c)});if(v.source=s,rc(v))throw v;return v}function ma(n){return Ec(n).toLowerCase()}function xa(n){return Ec(n).toUpperCase()}function ja(n,t,r){if(n=Ec(n),n&&(r||t===X))return R(n);if(!n||!(t=vu(t)))return n;var e=G(n),u=G(t);return Ou(e,W(e,u),L(e,u)+1).join("")}function Aa(n,t,r){if(n=Ec(n),n&&(r||t===X))return n.slice(0,H(n)+1);if(!n||!(t=vu(t)))return n;var e=G(n);return Ou(e,0,L(e,G(t))+1).join("")}function ka(n,t,r){if(n=Ec(n),n&&(r||t===X))return n.replace(Lt,"");
if(!n||!(t=vu(t)))return n;var e=G(n);return Ou(e,W(e,G(t))).join("")}function Oa(n,t){var r=An,e=kn;if(fc(t)){var u="separator"in t?t.separator:u;r="length"in t?kc(t.length):r,e="omission"in t?vu(t.omission):e}n=Ec(n);var i=n.length;if(T(n)){var o=G(n);i=o.length}if(r>=i)return n;var f=r-V(e);if(f<1)return e;var c=o?Ou(o,0,f).join(""):n.slice(0,f);if(u===X)return c+e;if(o&&(f+=c.length-f),Ah(u)){if(n.slice(f).search(u)){var a,l=c;for(u.global||(u=sl(u.source,Ec(Nt.exec(u))+"g")),u.lastIndex=0;a=u.exec(l);)var s=a.index;
c=c.slice(0,s===X?f:s)}}else if(n.indexOf(vu(u),f)!=f){var h=c.lastIndexOf(u);h>-1&&(c=c.slice(0,h))}return c+e}function Ia(n){return n=Ec(n),n&&jt.test(n)?n.replace(mt,ye):n}function Ra(n,t,r){return n=Ec(n),t=r?X:t,t===X?$(n)?Q(n):_(n):n.match(t)||[]}function za(t){var r=null==t?0:t.length,e=mi();return t=r?c(t,function(n){if("function"!=typeof n[1])throw new pl(en);return[e(n[0]),n[1]]}):[],uu(function(e){for(var u=-1;++u<r;){var i=t[u];if(n(i[0],this,e))return n(i[1],this,e)}})}function Ea(n){
return Nr(Fr(n,an))}function Sa(n){return function(){return n}}function Wa(n,t){return null==n||n!==n?t:n}function La(n){return n}function Ca(n){return De("function"==typeof n?n:Fr(n,an))}function Ua(n){return qe(Fr(n,an))}function Ba(n,t){return Ze(n,Fr(t,an))}function Ta(n,t,e){var u=Pc(t),i=fe(t,u);null!=e||fc(t)&&(i.length||!u.length)||(e=t,t=n,n=this,i=fe(t,Pc(t)));var o=!(fc(e)&&"chain"in e&&!e.chain),f=uc(n);return r(i,function(r){var e=t[r];n[r]=e,f&&(n.prototype[r]=function(){var t=this.__chain__;
if(o||t){var r=n(this.__wrapped__);return(r.__actions__=Tu(this.__actions__)).push({func:e,args:arguments,thisArg:n}),r.__chain__=t,r}return e.apply(n,a([this.value()],arguments))})}),n}function $a(){return re._===this&&(re._=Al),this}function Da(){}function Ma(n){return n=kc(n),uu(function(t){return Ge(t,n)})}function Fa(n){return Bi(n)?m(no(n)):Qe(n)}function Na(n){return function(t){return null==n?X:_e(n,t)}}function Pa(){return[]}function qa(){return!1}function Za(){return{}}function Ka(){return"";
}function Va(){return!0}function Ga(n,t){if(n=kc(n),n<1||n>Wn)return[];var r=Un,e=Hl(n,Un);t=mi(t),n-=Un;for(var u=O(e,t);++r<n;)t(r);return u}function Ha(n){return bh(n)?c(n,no):bc(n)?[n]:Tu(Cs(Ec(n)))}function Ja(n){var t=++wl;return Ec(n)+t}function Ya(n){return n&&n.length?Yr(n,La,me):X}function Qa(n,t){return n&&n.length?Yr(n,mi(t,2),me):X}function Xa(n){return w(n,La)}function nl(n,t){return w(n,mi(t,2))}function tl(n){return n&&n.length?Yr(n,La,Ne):X}function rl(n,t){return n&&n.length?Yr(n,mi(t,2),Ne):X;
}function el(n){return n&&n.length?k(n,La):0}function ul(n,t){return n&&n.length?k(n,mi(t,2)):0}x=null==x?re:be.defaults(re.Object(),x,be.pick(re,qr));var il=x.Array,ol=x.Date,fl=x.Error,cl=x.Function,al=x.Math,ll=x.Object,sl=x.RegExp,hl=x.String,pl=x.TypeError,_l=il.prototype,vl=cl.prototype,gl=ll.prototype,yl=x["__core-js_shared__"],dl=vl.toString,bl=gl.hasOwnProperty,wl=0,ml=function(){var n=/[^.]+$/.exec(yl&&yl.keys&&yl.keys.IE_PROTO||"");return n?"Symbol(src)_1."+n:""}(),xl=gl.toString,jl=dl.call(ll),Al=re._,kl=sl("^"+dl.call(bl).replace(St,"\\$&").replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g,"$1.*?")+"$"),Ol=ie?x.Buffer:X,Il=x.Symbol,Rl=x.Uint8Array,zl=Ol?Ol.allocUnsafe:X,El=F(ll.getPrototypeOf,ll),Sl=ll.create,Wl=gl.propertyIsEnumerable,Ll=_l.splice,Cl=Il?Il.isConcatSpreadable:X,Ul=Il?Il.iterator:X,Bl=Il?Il.toStringTag:X,Tl=function(){
try{var n=Ai(ll,"defineProperty");return n({},"",{}),n}catch(n){}}(),$l=x.clearTimeout!==re.clearTimeout&&x.clearTimeout,Dl=ol&&ol.now!==re.Date.now&&ol.now,Ml=x.setTimeout!==re.setTimeout&&x.setTimeout,Fl=al.ceil,Nl=al.floor,Pl=ll.getOwnPropertySymbols,ql=Ol?Ol.isBuffer:X,Zl=x.isFinite,Kl=_l.join,Vl=F(ll.keys,ll),Gl=al.max,Hl=al.min,Jl=ol.now,Yl=x.parseInt,Ql=al.random,Xl=_l.reverse,ns=Ai(x,"DataView"),ts=Ai(x,"Map"),rs=Ai(x,"Promise"),es=Ai(x,"Set"),us=Ai(x,"WeakMap"),is=Ai(ll,"create"),os=us&&new us,fs={},cs=to(ns),as=to(ts),ls=to(rs),ss=to(es),hs=to(us),ps=Il?Il.prototype:X,_s=ps?ps.valueOf:X,vs=ps?ps.toString:X,gs=function(){
function n(){}return function(t){if(!fc(t))return{};if(Sl)return Sl(t);n.prototype=t;var r=new n;return n.prototype=X,r}}();Z.templateSettings={escape:kt,evaluate:Ot,interpolate:It,variable:"",imports:{_:Z}},Z.prototype=J.prototype,Z.prototype.constructor=Z,Y.prototype=gs(J.prototype),Y.prototype.constructor=Y,Ct.prototype=gs(J.prototype),Ct.prototype.constructor=Ct,Xt.prototype.clear=nr,Xt.prototype.delete=tr,Xt.prototype.get=rr,Xt.prototype.has=er,Xt.prototype.set=ur,ir.prototype.clear=or,ir.prototype.delete=fr,
ir.prototype.get=cr,ir.prototype.has=ar,ir.prototype.set=lr,sr.prototype.clear=hr,sr.prototype.delete=pr,sr.prototype.get=_r,sr.prototype.has=vr,sr.prototype.set=gr,yr.prototype.add=yr.prototype.push=dr,yr.prototype.has=br,wr.prototype.clear=mr,wr.prototype.delete=xr,wr.prototype.get=jr,wr.prototype.has=Ar,wr.prototype.set=kr;var ys=Pu(ue),ds=Pu(oe,!0),bs=qu(),ws=qu(!0),ms=os?function(n,t){return os.set(n,t),n}:La,xs=Tl?function(n,t){return Tl(n,"toString",{configurable:!0,enumerable:!1,value:Sa(t),
writable:!0})}:La,js=uu,As=$l||function(n){return re.clearTimeout(n)},ks=es&&1/P(new es([,-0]))[1]==Sn?function(n){return new es(n)}:Da,Os=os?function(n){return os.get(n)}:Da,Is=Pl?function(n){return null==n?[]:(n=ll(n),i(Pl(n),function(t){return Wl.call(n,t)}))}:Pa,Rs=Pl?function(n){for(var t=[];n;)a(t,Is(n)),n=El(n);return t}:Pa,zs=we;(ns&&zs(new ns(new ArrayBuffer(1)))!=ct||ts&&zs(new ts)!=Gn||rs&&zs(rs.resolve())!=Qn||es&&zs(new es)!=tt||us&&zs(new us)!=it)&&(zs=function(n){var t=we(n),r=t==Yn?n.constructor:X,e=r?to(r):"";
if(e)switch(e){case cs:return ct;case as:return Gn;case ls:return Qn;case ss:return tt;case hs:return it}return t});var Es=yl?uc:qa,Ss=Qi(ms),Ws=Ml||function(n,t){return re.setTimeout(n,t)},Ls=Qi(xs),Cs=Pi(function(n){var t=[];return 46===n.charCodeAt(0)&&t.push(""),n.replace(Et,function(n,r,e,u){t.push(e?u.replace(Mt,"$1"):r||n)}),t}),Us=uu(function(n,t){return Jf(n)?Hr(n,ee(t,1,Jf,!0)):[]}),Bs=uu(function(n,t){var r=jo(t);return Jf(r)&&(r=X),Jf(n)?Hr(n,ee(t,1,Jf,!0),mi(r,2)):[]}),Ts=uu(function(n,t){
var r=jo(t);return Jf(r)&&(r=X),Jf(n)?Hr(n,ee(t,1,Jf,!0),X,r):[]}),$s=uu(function(n){var t=c(n,ju);return t.length&&t[0]===n[0]?ke(t):[]}),Ds=uu(function(n){var t=jo(n),r=c(n,ju);return t===jo(r)?t=X:r.pop(),r.length&&r[0]===n[0]?ke(r,mi(t,2)):[]}),Ms=uu(function(n){var t=jo(n),r=c(n,ju);return t="function"==typeof t?t:X,t&&r.pop(),r.length&&r[0]===n[0]?ke(r,X,t):[]}),Fs=uu(Oo),Ns=gi(function(n,t){var r=null==n?0:n.length,e=Tr(n,t);return nu(n,c(t,function(n){return Ci(n,r)?+n:n}).sort(Lu)),e}),Ps=uu(function(n){
return gu(ee(n,1,Jf,!0))}),qs=uu(function(n){var t=jo(n);return Jf(t)&&(t=X),gu(ee(n,1,Jf,!0),mi(t,2))}),Zs=uu(function(n){var t=jo(n);return t="function"==typeof t?t:X,gu(ee(n,1,Jf,!0),X,t)}),Ks=uu(function(n,t){return Jf(n)?Hr(n,t):[]}),Vs=uu(function(n){return mu(i(n,Jf))}),Gs=uu(function(n){var t=jo(n);return Jf(t)&&(t=X),mu(i(n,Jf),mi(t,2))}),Hs=uu(function(n){var t=jo(n);return t="function"==typeof t?t:X,mu(i(n,Jf),X,t)}),Js=uu(Go),Ys=uu(function(n){var t=n.length,r=t>1?n[t-1]:X;return r="function"==typeof r?(n.pop(),
r):X,Ho(n,r)}),Qs=gi(function(n){var t=n.length,r=t?n[0]:0,e=this.__wrapped__,u=function(t){return Tr(t,n)};return!(t>1||this.__actions__.length)&&e instanceof Ct&&Ci(r)?(e=e.slice(r,+r+(t?1:0)),e.__actions__.push({func:nf,args:[u],thisArg:X}),new Y(e,this.__chain__).thru(function(n){return t&&!n.length&&n.push(X),n})):this.thru(u)}),Xs=Fu(function(n,t,r){bl.call(n,r)?++n[r]:Br(n,r,1)}),nh=Ju(ho),th=Ju(po),rh=Fu(function(n,t,r){bl.call(n,r)?n[r].push(t):Br(n,r,[t])}),eh=uu(function(t,r,e){var u=-1,i="function"==typeof r,o=Hf(t)?il(t.length):[];
return ys(t,function(t){o[++u]=i?n(r,t,e):Ie(t,r,e)}),o}),uh=Fu(function(n,t,r){Br(n,r,t)}),ih=Fu(function(n,t,r){n[r?0:1].push(t)},function(){return[[],[]]}),oh=uu(function(n,t){if(null==n)return[];var r=t.length;return r>1&&Ui(n,t[0],t[1])?t=[]:r>2&&Ui(t[0],t[1],t[2])&&(t=[t[0]]),He(n,ee(t,1),[])}),fh=Dl||function(){return re.Date.now()},ch=uu(function(n,t,r){var e=_n;if(r.length){var u=N(r,wi(ch));e|=bn}return ai(n,e,t,r,u)}),ah=uu(function(n,t,r){var e=_n|vn;if(r.length){var u=N(r,wi(ah));e|=bn;
}return ai(t,e,n,r,u)}),lh=uu(function(n,t){return Gr(n,1,t)}),sh=uu(function(n,t,r){return Gr(n,Ic(t)||0,r)});Cf.Cache=sr;var hh=js(function(t,r){r=1==r.length&&bh(r[0])?c(r[0],z(mi())):c(ee(r,1),z(mi()));var e=r.length;return uu(function(u){for(var i=-1,o=Hl(u.length,e);++i<o;)u[i]=r[i].call(this,u[i]);return n(t,this,u)})}),ph=uu(function(n,t){return ai(n,bn,X,t,N(t,wi(ph)))}),_h=uu(function(n,t){return ai(n,wn,X,t,N(t,wi(_h)))}),vh=gi(function(n,t){return ai(n,xn,X,X,X,t)}),gh=ii(me),yh=ii(function(n,t){
return n>=t}),dh=Re(function(){return arguments}())?Re:function(n){return cc(n)&&bl.call(n,"callee")&&!Wl.call(n,"callee")},bh=il.isArray,wh=ce?z(ce):ze,mh=ql||qa,xh=ae?z(ae):Ee,jh=le?z(le):Le,Ah=se?z(se):Be,kh=he?z(he):Te,Oh=pe?z(pe):$e,Ih=ii(Ne),Rh=ii(function(n,t){return n<=t}),zh=Nu(function(n,t){if(Mi(t)||Hf(t))return $u(t,Pc(t),n),X;for(var r in t)bl.call(t,r)&&Sr(n,r,t[r])}),Eh=Nu(function(n,t){$u(t,qc(t),n)}),Sh=Nu(function(n,t,r,e){$u(t,qc(t),n,e)}),Wh=Nu(function(n,t,r,e){$u(t,Pc(t),n,e);
}),Lh=gi(Tr),Ch=uu(function(n,t){n=ll(n);var r=-1,e=t.length,u=e>2?t[2]:X;for(u&&Ui(t[0],t[1],u)&&(e=1);++r<e;)for(var i=t[r],o=qc(i),f=-1,c=o.length;++f<c;){var a=o[f],l=n[a];(l===X||Gf(l,gl[a])&&!bl.call(n,a))&&(n[a]=i[a])}return n}),Uh=uu(function(t){return t.push(X,si),n(Mh,X,t)}),Bh=Xu(function(n,t,r){null!=t&&"function"!=typeof t.toString&&(t=xl.call(t)),n[t]=r},Sa(La)),Th=Xu(function(n,t,r){null!=t&&"function"!=typeof t.toString&&(t=xl.call(t)),bl.call(n,t)?n[t].push(r):n[t]=[r]},mi),$h=uu(Ie),Dh=Nu(function(n,t,r){
Ke(n,t,r)}),Mh=Nu(function(n,t,r,e){Ke(n,t,r,e)}),Fh=gi(function(n,t){var r={};if(null==n)return r;var e=!1;t=c(t,function(t){return t=ku(t,n),e||(e=t.length>1),t}),$u(n,di(n),r),e&&(r=Fr(r,an|ln|sn,hi));for(var u=t.length;u--;)yu(r,t[u]);return r}),Nh=gi(function(n,t){return null==n?{}:Je(n,t)}),Ph=ci(Pc),qh=ci(qc),Zh=Vu(function(n,t,r){return t=t.toLowerCase(),n+(r?fa(t):t)}),Kh=Vu(function(n,t,r){return n+(r?"-":"")+t.toLowerCase()}),Vh=Vu(function(n,t,r){return n+(r?" ":"")+t.toLowerCase()}),Gh=Ku("toLowerCase"),Hh=Vu(function(n,t,r){
return n+(r?"_":"")+t.toLowerCase()}),Jh=Vu(function(n,t,r){return n+(r?" ":"")+Qh(t)}),Yh=Vu(function(n,t,r){return n+(r?" ":"")+t.toUpperCase()}),Qh=Ku("toUpperCase"),Xh=uu(function(t,r){try{return n(t,X,r)}catch(n){return rc(n)?n:new fl(n)}}),np=gi(function(n,t){return r(t,function(t){t=no(t),Br(n,t,ch(n[t],n))}),n}),tp=Yu(),rp=Yu(!0),ep=uu(function(n,t){return function(r){return Ie(r,n,t)}}),up=uu(function(n,t){return function(r){return Ie(n,r,t)}}),ip=ti(c),op=ti(u),fp=ti(h),cp=ui(),ap=ui(!0),lp=ni(function(n,t){
return n+t},0),sp=fi("ceil"),hp=ni(function(n,t){return n/t},1),pp=fi("floor"),_p=ni(function(n,t){return n*t},1),vp=fi("round"),gp=ni(function(n,t){return n-t},0);return Z.after=If,Z.ary=Rf,Z.assign=zh,Z.assignIn=Eh,Z.assignInWith=Sh,Z.assignWith=Wh,Z.at=Lh,Z.before=zf,Z.bind=ch,Z.bindAll=np,Z.bindKey=ah,Z.castArray=Nf,Z.chain=Qo,Z.chunk=uo,Z.compact=io,Z.concat=oo,Z.cond=za,Z.conforms=Ea,Z.constant=Sa,Z.countBy=Xs,Z.create=Sc,Z.curry=Ef,Z.curryRight=Sf,Z.debounce=Wf,Z.defaults=Ch,Z.defaultsDeep=Uh,
Z.defer=lh,Z.delay=sh,Z.difference=Us,Z.differenceBy=Bs,Z.differenceWith=Ts,Z.drop=fo,Z.dropRight=co,Z.dropRightWhile=ao,Z.dropWhile=lo,Z.fill=so,Z.filter=lf,Z.flatMap=sf,Z.flatMapDeep=hf,Z.flatMapDepth=pf,Z.flatten=_o,Z.flattenDeep=vo,Z.flattenDepth=go,Z.flip=Lf,Z.flow=tp,Z.flowRight=rp,Z.fromPairs=yo,Z.functions=$c,Z.functionsIn=Dc,Z.groupBy=rh,Z.initial=mo,Z.intersection=$s,Z.intersectionBy=Ds,Z.intersectionWith=Ms,Z.invert=Bh,Z.invertBy=Th,Z.invokeMap=eh,Z.iteratee=Ca,Z.keyBy=uh,Z.keys=Pc,Z.keysIn=qc,
Z.map=yf,Z.mapKeys=Zc,Z.mapValues=Kc,Z.matches=Ua,Z.matchesProperty=Ba,Z.memoize=Cf,Z.merge=Dh,Z.mergeWith=Mh,Z.method=ep,Z.methodOf=up,Z.mixin=Ta,Z.negate=Uf,Z.nthArg=Ma,Z.omit=Fh,Z.omitBy=Vc,Z.once=Bf,Z.orderBy=df,Z.over=ip,Z.overArgs=hh,Z.overEvery=op,Z.overSome=fp,Z.partial=ph,Z.partialRight=_h,Z.partition=ih,Z.pick=Nh,Z.pickBy=Gc,Z.property=Fa,Z.propertyOf=Na,Z.pull=Fs,Z.pullAll=Oo,Z.pullAllBy=Io,Z.pullAllWith=Ro,Z.pullAt=Ns,Z.range=cp,Z.rangeRight=ap,Z.rearg=vh,Z.reject=mf,Z.remove=zo,Z.rest=Tf,
Z.reverse=Eo,Z.sampleSize=jf,Z.set=Jc,Z.setWith=Yc,Z.shuffle=Af,Z.slice=So,Z.sortBy=oh,Z.sortedUniq=$o,Z.sortedUniqBy=Do,Z.split=da,Z.spread=$f,Z.tail=Mo,Z.take=Fo,Z.takeRight=No,Z.takeRightWhile=Po,Z.takeWhile=qo,Z.tap=Xo,Z.throttle=Df,Z.thru=nf,Z.toArray=jc,Z.toPairs=Ph,Z.toPairsIn=qh,Z.toPath=Ha,Z.toPlainObject=Rc,Z.transform=Qc,Z.unary=Mf,Z.union=Ps,Z.unionBy=qs,Z.unionWith=Zs,Z.uniq=Zo,Z.uniqBy=Ko,Z.uniqWith=Vo,Z.unset=Xc,Z.unzip=Go,Z.unzipWith=Ho,Z.update=na,Z.updateWith=ta,Z.values=ra,Z.valuesIn=ea,
Z.without=Ks,Z.words=Ra,Z.wrap=Ff,Z.xor=Vs,Z.xorBy=Gs,Z.xorWith=Hs,Z.zip=Js,Z.zipObject=Jo,Z.zipObjectDeep=Yo,Z.zipWith=Ys,Z.entries=Ph,Z.entriesIn=qh,Z.extend=Eh,Z.extendWith=Sh,Ta(Z,Z),Z.add=lp,Z.attempt=Xh,Z.camelCase=Zh,Z.capitalize=fa,Z.ceil=sp,Z.clamp=ua,Z.clone=Pf,Z.cloneDeep=Zf,Z.cloneDeepWith=Kf,Z.cloneWith=qf,Z.conformsTo=Vf,Z.deburr=ca,Z.defaultTo=Wa,Z.divide=hp,Z.endsWith=aa,Z.eq=Gf,Z.escape=la,Z.escapeRegExp=sa,Z.every=af,Z.find=nh,Z.findIndex=ho,Z.findKey=Wc,Z.findLast=th,Z.findLastIndex=po,
Z.findLastKey=Lc,Z.floor=pp,Z.forEach=_f,Z.forEachRight=vf,Z.forIn=Cc,Z.forInRight=Uc,Z.forOwn=Bc,Z.forOwnRight=Tc,Z.get=Mc,Z.gt=gh,Z.gte=yh,Z.has=Fc,Z.hasIn=Nc,Z.head=bo,Z.identity=La,Z.includes=gf,Z.indexOf=wo,Z.inRange=ia,Z.invoke=$h,Z.isArguments=dh,Z.isArray=bh,Z.isArrayBuffer=wh,Z.isArrayLike=Hf,Z.isArrayLikeObject=Jf,Z.isBoolean=Yf,Z.isBuffer=mh,Z.isDate=xh,Z.isElement=Qf,Z.isEmpty=Xf,Z.isEqual=nc,Z.isEqualWith=tc,Z.isError=rc,Z.isFinite=ec,Z.isFunction=uc,Z.isInteger=ic,Z.isLength=oc,Z.isMap=jh,
Z.isMatch=ac,Z.isMatchWith=lc,Z.isNaN=sc,Z.isNative=hc,Z.isNil=_c,Z.isNull=pc,Z.isNumber=vc,Z.isObject=fc,Z.isObjectLike=cc,Z.isPlainObject=gc,Z.isRegExp=Ah,Z.isSafeInteger=yc,Z.isSet=kh,Z.isString=dc,Z.isSymbol=bc,Z.isTypedArray=Oh,Z.isUndefined=wc,Z.isWeakMap=mc,Z.isWeakSet=xc,Z.join=xo,Z.kebabCase=Kh,Z.last=jo,Z.lastIndexOf=Ao,Z.lowerCase=Vh,Z.lowerFirst=Gh,Z.lt=Ih,Z.lte=Rh,Z.max=Ya,Z.maxBy=Qa,Z.mean=Xa,Z.meanBy=nl,Z.min=tl,Z.minBy=rl,Z.stubArray=Pa,Z.stubFalse=qa,Z.stubObject=Za,Z.stubString=Ka,
Z.stubTrue=Va,Z.multiply=_p,Z.nth=ko,Z.noConflict=$a,Z.noop=Da,Z.now=fh,Z.pad=ha,Z.padEnd=pa,Z.padStart=_a,Z.parseInt=va,Z.random=oa,Z.reduce=bf,Z.reduceRight=wf,Z.repeat=ga,Z.replace=ya,Z.result=Hc,Z.round=vp,Z.runInContext=p,Z.sample=xf,Z.size=kf,Z.snakeCase=Hh,Z.some=Of,Z.sortedIndex=Wo,Z.sortedIndexBy=Lo,Z.sortedIndexOf=Co,Z.sortedLastIndex=Uo,Z.sortedLastIndexBy=Bo,Z.sortedLastIndexOf=To,Z.startCase=Jh,Z.startsWith=ba,Z.subtract=gp,Z.sum=el,Z.sumBy=ul,Z.template=wa,Z.times=Ga,Z.toFinite=Ac,Z.toInteger=kc,
Z.toLength=Oc,Z.toLower=ma,Z.toNumber=Ic,Z.toSafeInteger=zc,Z.toString=Ec,Z.toUpper=xa,Z.trim=ja,Z.trimEnd=Aa,Z.trimStart=ka,Z.truncate=Oa,Z.unescape=Ia,Z.uniqueId=Ja,Z.upperCase=Yh,Z.upperFirst=Qh,Z.each=_f,Z.eachRight=vf,Z.first=bo,Ta(Z,function(){var n={};return ue(Z,function(t,r){bl.call(Z.prototype,r)||(n[r]=t)}),n}(),{chain:!1}),Z.VERSION=nn,r(["bind","bindKey","curry","curryRight","partial","partialRight"],function(n){Z[n].placeholder=Z}),r(["drop","take"],function(n,t){Ct.prototype[n]=function(r){
r=r===X?1:Gl(kc(r),0);var e=this.__filtered__&&!t?new Ct(this):this.clone();return e.__filtered__?e.__takeCount__=Hl(r,e.__takeCount__):e.__views__.push({size:Hl(r,Un),type:n+(e.__dir__<0?"Right":"")}),e},Ct.prototype[n+"Right"]=function(t){return this.reverse()[n](t).reverse()}}),r(["filter","map","takeWhile"],function(n,t){var r=t+1,e=r==Rn||r==En;Ct.prototype[n]=function(n){var t=this.clone();return t.__iteratees__.push({iteratee:mi(n,3),type:r}),t.__filtered__=t.__filtered__||e,t}}),r(["head","last"],function(n,t){
var r="take"+(t?"Right":"");Ct.prototype[n]=function(){return this[r](1).value()[0]}}),r(["initial","tail"],function(n,t){var r="drop"+(t?"":"Right");Ct.prototype[n]=function(){return this.__filtered__?new Ct(this):this[r](1)}}),Ct.prototype.compact=function(){return this.filter(La)},Ct.prototype.find=function(n){return this.filter(n).head()},Ct.prototype.findLast=function(n){return this.reverse().find(n)},Ct.prototype.invokeMap=uu(function(n,t){return"function"==typeof n?new Ct(this):this.map(function(r){
return Ie(r,n,t)})}),Ct.prototype.reject=function(n){return this.filter(Uf(mi(n)))},Ct.prototype.slice=function(n,t){n=kc(n);var r=this;return r.__filtered__&&(n>0||t<0)?new Ct(r):(n<0?r=r.takeRight(-n):n&&(r=r.drop(n)),t!==X&&(t=kc(t),r=t<0?r.dropRight(-t):r.take(t-n)),r)},Ct.prototype.takeRightWhile=function(n){return this.reverse().takeWhile(n).reverse()},Ct.prototype.toArray=function(){return this.take(Un)},ue(Ct.prototype,function(n,t){var r=/^(?:filter|find|map|reject)|While$/.test(t),e=/^(?:head|last)$/.test(t),u=Z[e?"take"+("last"==t?"Right":""):t],i=e||/^find/.test(t);
u&&(Z.prototype[t]=function(){var t=this.__wrapped__,o=e?[1]:arguments,f=t instanceof Ct,c=o[0],l=f||bh(t),s=function(n){var t=u.apply(Z,a([n],o));return e&&h?t[0]:t};l&&r&&"function"==typeof c&&1!=c.length&&(f=l=!1);var h=this.__chain__,p=!!this.__actions__.length,_=i&&!h,v=f&&!p;if(!i&&l){t=v?t:new Ct(this);var g=n.apply(t,o);return g.__actions__.push({func:nf,args:[s],thisArg:X}),new Y(g,h)}return _&&v?n.apply(this,o):(g=this.thru(s),_?e?g.value()[0]:g.value():g)})}),r(["pop","push","shift","sort","splice","unshift"],function(n){
var t=_l[n],r=/^(?:push|sort|unshift)$/.test(n)?"tap":"thru",e=/^(?:pop|shift)$/.test(n);Z.prototype[n]=function(){var n=arguments;if(e&&!this.__chain__){var u=this.value();return t.apply(bh(u)?u:[],n)}return this[r](function(r){return t.apply(bh(r)?r:[],n)})}}),ue(Ct.prototype,function(n,t){var r=Z[t];if(r){var e=r.name+"";bl.call(fs,e)||(fs[e]=[]),fs[e].push({name:t,func:r})}}),fs[Qu(X,vn).name]=[{name:"wrapper",func:X}],Ct.prototype.clone=$t,Ct.prototype.reverse=Yt,Ct.prototype.value=Qt,Z.prototype.at=Qs,
Z.prototype.chain=tf,Z.prototype.commit=rf,Z.prototype.next=ef,Z.prototype.plant=of,Z.prototype.reverse=ff,Z.prototype.toJSON=Z.prototype.valueOf=Z.prototype.value=cf,Z.prototype.first=Z.prototype.head,Ul&&(Z.prototype[Ul]=uf),Z},be=de();"function"==typeof define&&"object"==typeof define.amd&&define.amd?(re._=be,define(function(){return be})):ue?((ue.exports=be)._=be,ee._=be):re._=be}).call(this); |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,777 | [Bug][UI Next][V1.0.0-Alpha] An error occurred while importing workflow | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
An error occurred while importing workflow
The reason for the error is that the tenant does not exist.
<img width="1912" alt="image" src="https://user-images.githubusercontent.com/76080484/157416523-80ccb5fd-3750-44e0-bc33-a42ab8b20f15.png">
### What you expected to happen
The modification is saved successfully
### How to reproduce
Import a workflow, modify its content, and then click Save.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8777 | https://github.com/apache/dolphinscheduler/pull/8797 | 38fba609cbc046c11678bbfc17253cc3f70aa8e9 | bb1ba967cdd0715bb83f69406e3c184fcecd4294 | "2022-03-09T09:55:23Z" | java | "2022-03-10T04:58:19Z" | dolphinscheduler-ui-next/src/views/projects/workflow/components/dag/dag-save-modal.tsx | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { defineComponent, PropType, ref, computed, onMounted, watch } from 'vue'
import Modal from '@/components/modal'
import { useI18n } from 'vue-i18n'
import {
NForm,
NFormItem,
NInput,
NSelect,
NSwitch,
NInputNumber,
NDynamicInput,
NCheckbox
} from 'naive-ui'
import { queryTenantList } from '@/service/modules/tenants'
import { SaveForm, WorkflowDefinition } from './types'
import { useRoute } from 'vue-router'
import { verifyName } from '@/service/modules/process-definition'
import './x6-style.scss'
import { positiveIntegerRegex } from '@/utils/regex'
const props = {
visible: {
type: Boolean as PropType<boolean>,
default: false
},
// If this prop is passed, it means from definition detail
definition: {
type: Object as PropType<WorkflowDefinition>,
default: undefined
}
}
// Subset of the tenant entity returned by queryTenantList that this
// modal needs for the tenant selector.
interface Tenant {
  tenantCode: string
  id: number
}
/**
 * Modal collecting the basic info (name, description, tenant, timeout,
 * global params) required to save a workflow definition.
 * Emits 'save' with the form model on success and 'update:show' on cancel.
 */
export default defineComponent({
  name: 'dag-save-modal',
  props,
  emits: ['update:show', 'save'],
  setup(props, context) {
    const route = useRoute()
    const { t } = useI18n()

    const projectCode = Number(route.params.projectCode)

    // Tenant options for the selector; the built-in 'default' tenant is
    // always appended so the form can fall back to it.
    const tenants = ref<Tenant[]>([])
    const tenantsDropdown = computed(() => {
      if (tenants.value) {
        return tenants.value
          .map((t) => ({
            label: t.tenantCode,
            value: t.tenantCode
          }))
          .concat({ label: 'default', value: 'default' })
      }
      return []
    })
    onMounted(() => {
      queryTenantList().then((res: any) => {
        tenants.value = res
      })
    })

    // Form model backing the modal inputs.
    const formValue = ref<SaveForm>({
      name: '',
      description: '',
      tenantCode: 'default',
      timeoutFlag: false,
      timeout: 0,
      globalParams: [],
      release: false
    })
    const formRef = ref()
    const rule = {
      name: {
        required: true,
        message: t('project.dag.dag_name_empty')
      },
      timeout: {
        // Timeout must be a positive integer when the timeout flag is on.
        validator() {
          if (
            formValue.value.timeoutFlag &&
            !positiveIntegerRegex.test(String(formValue.value.timeout))
          ) {
            return new Error(t('project.dag.positive_integer'))
          }
        }
      },
      globalParams: {
        // Each global parameter must be non-empty and unique.
        // NOTE(review): this checks param.value, but the prop name lives in
        // param.key (see updateModalData) — presumably param.key should be
        // validated here; confirm against the intended UX.
        validator() {
          const props = new Set()
          for (const param of formValue.value.globalParams) {
            const prop = param.value
            if (!prop) {
              return new Error(t('project.dag.prop_empty'))
            }

            if (props.has(prop)) {
              return new Error(t('project.dag.prop_repeat'))
            }
            props.add(prop)
          }
        }
      }
    }
    const onSubmit = () => {
      // naive-ui passes the validation errors to the callback; an
      // empty/undefined value means validation passed.
      formRef.value.validate(async (valid: any) => {
        if (!valid) {
          const params = {
            name: formValue.value.name
          }
          if (
            props.definition?.processDefinition.name !== formValue.value.name
          ) {
            // Only check name uniqueness when the name actually changed.
            verifyName(params, projectCode)
              .then(() => context.emit('save', formValue.value))
          } else {
            context.emit('save', formValue.value)
          }
        }
      })
    }
    const onCancel = () => {
      context.emit('update:show', false)
    }

    // Populate the form from an existing definition (edit / import case).
    const updateModalData = () => {
      const process = props.definition?.processDefinition
      if (process) {
        formValue.value.name = process.name
        formValue.value.description = process.description
        // An imported definition may carry an empty tenant code; fall back
        // to 'default' so saving does not fail server-side with
        // "tenant not exists".
        formValue.value.tenantCode = process.tenantCode || 'default'
        if (process.timeout && process.timeout > 0) {
          formValue.value.timeoutFlag = true
          formValue.value.timeout = process.timeout
        }
        formValue.value.globalParams = process.globalParamList.map((param) => ({
          key: param.prop,
          value: param.value
        }))
      }
    }

    onMounted(() => updateModalData())

    watch(
      () => props.definition?.processDefinition,
      () => updateModalData()
    )

    return () => (
      <Modal
        show={props.visible}
        title={t('project.dag.basic_info')}
        onConfirm={onSubmit}
        onCancel={onCancel}
        autoFocus={false}
      >
        <NForm
          label-width='100'
          model={formValue.value}
          rules={rule}
          size='medium'
          label-placement='left'
          ref={formRef}
        >
          <NFormItem label={t('project.dag.workflow_name')} path='name'>
            <NInput v-model:value={formValue.value.name} />
          </NFormItem>
          <NFormItem label={t('project.dag.description')} path='description'>
            <NInput
              type='textarea'
              v-model:value={formValue.value.description}
            />
          </NFormItem>
          <NFormItem label={t('project.dag.tenant')} path='tenantCode'>
            <NSelect
              options={tenantsDropdown.value}
              v-model:value={formValue.value.tenantCode}
            />
          </NFormItem>
          <NFormItem label={t('project.dag.timeout_alert')} path='timeoutFlag'>
            <NSwitch v-model:value={formValue.value.timeoutFlag} />
          </NFormItem>
          {formValue.value.timeoutFlag && (
            <NFormItem label=' ' path='timeout'>
              <NInputNumber
                v-model:value={formValue.value.timeout}
                show-button={false}
                min={0}
                v-slots={{
                  suffix: () => '分'
                }}
              ></NInputNumber>
            </NFormItem>
          )}
          <NFormItem
            label={t('project.dag.global_variables')}
            path='globalParams'
          >
            <NDynamicInput
              v-model:value={formValue.value.globalParams}
              preset='pair'
              key-placeholder={t('project.dag.key')}
              value-placeholder={t('project.dag.value')}
            />
          </NFormItem>
          {props.definition && (
            <NFormItem label=' ' path='timeoutFlag'>
              <NCheckbox v-model:checked={formValue.value.release}>
                {t('project.dag.online_directly')}
              </NCheckbox>
            </NFormItem>
          )}
        </NForm>
      </Modal>
    )
  }
})
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 7,896 | [Bug] [api] When the project is authorized, it will generate a duplicate authorized project | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
step1, Authorize more than two projects to user a
step2, Cancel some of the projects that have been authorized to user a, please do not cancel all of them
step3, When project authorization is performed again, the list of authorized projects produces duplicate project names
The problem is shown in the following gif demonstration:
![问题描速](https://user-images.githubusercontent.com/56599784/148639491-659b7b18-56c6-4803-ae7d-4d9f5c9de463.gif)
### What you expected to happen
Do not generate duplicate authorized items
### How to reproduce
see 'what happened'
### Anything else
_No response_
### Version
2.0.1
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/7896 | https://github.com/apache/dolphinscheduler/pull/8453 | bb1ba967cdd0715bb83f69406e3c184fcecd4294 | 2aed250ed4cdb33d7a116e59bceb5d30eda6073a | "2022-01-08T09:42:29Z" | java | "2022-03-10T08:58:02Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent;
import org.apache.dolphinscheduler.api.dto.resources.visitor.ResourceTreeVisitor;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.service.UsersService;
import org.apache.dolphinscheduler.api.utils.CheckUtils;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.EncryptionUtils;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.dao.entity.AlertGroup;
import org.apache.dolphinscheduler.dao.entity.DatasourceUser;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.ProjectUser;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.ResourcesUser;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.UDFUser;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.AccessTokenMapper;
import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper;
import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper;
import org.apache.dolphinscheduler.dao.mapper.ResourceMapper;
import org.apache.dolphinscheduler.dao.mapper.ResourceUserMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.apache.dolphinscheduler.dao.utils.ResourceProcessDefinitionUtils;
import org.apache.dolphinscheduler.spi.enums.ResourceType;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* users service impl
*/
@Service
public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
    private static final Logger logger = LoggerFactory.getLogger(UsersServiceImpl.class);

    // DAO mappers for the user entity and everything a user can be related
    // to: access tokens, tenants, projects, resources, datasources, UDFs,
    // alert groups and process definitions.
    @Autowired
    private AccessTokenMapper accessTokenMapper;

    @Autowired
    private UserMapper userMapper;

    @Autowired
    private TenantMapper tenantMapper;

    @Autowired
    private ProjectUserMapper projectUserMapper;

    @Autowired
    private ResourceUserMapper resourceUserMapper;

    @Autowired
    private ResourceMapper resourceMapper;

    @Autowired
    private DataSourceUserMapper datasourceUserMapper;

    @Autowired
    private UDFUserMapper udfUserMapper;

    @Autowired
    private AlertGroupMapper alertGroupMapper;

    @Autowired
    private ProcessDefinitionMapper processDefinitionMapper;

    @Autowired
    private ProjectMapper projectMapper;
    /**
     * create user; only a system admin has permission
     *
     * @param loginUser login user (must be admin)
     * @param userName user name
     * @param userPassword user password (stored as an MD5 digest)
     * @param email email
     * @param tenantId tenant id (must reference an existing tenant)
     * @param phone phone
     * @param queue queue
     * @param state state (1 enabled, 0 disabled)
     * @return create result code, with the new user under DATA_LIST on success
     * @throws IOException when creating the user's HDFS directory fails
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public Map<String, Object> createUser(User loginUser,
                                          String userName,
                                          String userPassword,
                                          String email,
                                          int tenantId,
                                          String phone,
                                          String queue,
                                          int state) throws IOException {
        Map<String, Object> result = new HashMap<>();

        //check all user params
        String msg = this.checkUserParams(userName, userPassword, email, phone);

        if (!StringUtils.isEmpty(msg)) {
            putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, msg);
            return result;
        }
        // only admins may create users
        if (!isAdmin(loginUser)) {
            putMsg(result, Status.USER_NO_OPERATION_PERM);
            return result;
        }

        // the target tenant must already exist
        if (!checkTenantExists(tenantId)) {
            putMsg(result, Status.TENANT_NOT_EXIST);
            return result;
        }

        User user = createUser(userName, userPassword, email, tenantId, phone, queue, state);

        Tenant tenant = tenantMapper.queryById(tenantId);
        // resource upload startup: provision HDFS dirs only when enabled
        if (PropertyUtils.getResUploadStartupState()) {
            // if tenant dir not exists, create it first
            if (!HadoopUtils.getInstance().exists(HadoopUtils.getHdfsTenantDir(tenant.getTenantCode()))) {
                createTenantDirIfNotExists(tenant.getTenantCode());
            }
            // create the per-user directory under the tenant dir
            String userPath = HadoopUtils.getHdfsUserDir(tenant.getTenantCode(), user.getId());

            HadoopUtils.getInstance().mkdir(userPath);
        }

        result.put(Constants.DATA_LIST, user);
        putMsg(result, Status.SUCCESS);
        return result;
    }
@Override
@Transactional(rollbackFor = RuntimeException.class)
public User createUser(String userName,
String userPassword,
String email,
int tenantId,
String phone,
String queue,
int state) {
User user = new User();
Date now = new Date();
user.setUserName(userName);
user.setUserPassword(EncryptionUtils.getMd5(userPassword));
user.setEmail(email);
user.setTenantId(tenantId);
user.setPhone(phone);
user.setState(state);
// create general users, administrator users are currently built-in
user.setUserType(UserType.GENERAL_USER);
user.setCreateTime(now);
user.setUpdateTime(now);
if (StringUtils.isEmpty(queue)) {
queue = "";
}
user.setQueue(queue);
// save user
userMapper.insert(user);
return user;
}
    /***
     * create User for ldap login
     *
     * @param userType type of the account to create
     * @param userId login id used as the user name
     * @param email email address reported by the directory
     * @return the persisted user
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public User createUser(UserType userType, String userId, String email) {
        User user = new User();
        Date now = new Date();

        user.setUserName(userId);
        user.setEmail(email);
        // the account type is supplied by the caller; the administrator
        // account remains built in
        user.setUserType(userType);
        user.setCreateTime(now);
        user.setUpdateTime(now);
        // LDAP users get no yarn queue by default
        user.setQueue("");

        // save user
        userMapper.insert(user);
        return user;
    }
    /**
     * get user by user name (exact match)
     *
     * @param userName user name
     * @return exist user or null
     */
    @Override
    public User getUserByUserName(String userName) {
        return userMapper.queryByUserNameAccurately(userName);
    }
    /**
     * query user by id
     *
     * @param id user id
     * @return user info, or null when no such user exists
     */
    @Override
    public User queryUser(int id) {
        return userMapper.selectById(id);
    }
@Override
public List<User> queryUser(List<Integer> ids) {
if (CollectionUtils.isEmpty(ids)) {
return new ArrayList<>();
}
return userMapper.selectByIds(ids);
}
    /**
     * query user by name (exact match)
     *
     * @param name user name
     * @return user info, or null when no such user exists
     */
    @Override
    public User queryUser(String name) {
        return userMapper.queryByUserNameAccurately(name);
    }
/**
* query user
*
* @param name name
* @param password password
* @return user info
*/
@Override
public User queryUser(String name, String password) {
String md5 = EncryptionUtils.getMd5(password);
return userMapper.queryUserByNamePassword(name, md5);
}
/**
* get user id by user name
*
* @param name user name
* @return if name empty 0, user not exists -1, user exist user id
*/
@Override
public int getUserIdByName(String name) {
//executor name query
int executorId = 0;
if (StringUtils.isNotEmpty(name)) {
User executor = queryUser(name);
if (null != executor) {
executorId = executor.getId();
} else {
executorId = -1;
}
}
return executorId;
}
    /**
     * query user list, paged; admin only
     *
     * @param loginUser login user (must be admin)
     * @param searchVal search value matched against user fields
     * @param pageNo page number (1-based)
     * @param pageSize page size
     * @return user list page
     */
    @Override
    public Result queryUserList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
        Result result = new Result();
        // only admins may list users
        if (!isAdmin(loginUser)) {
            putMsg(result, Status.USER_NO_OPERATION_PERM);
            return result;
        }

        Page<User> page = new Page<>(pageNo, pageSize);

        IPage<User> scheduleList = userMapper.queryUserPaging(page, searchVal);

        // wrap the mybatis-plus page into the API PageInfo envelope
        PageInfo<User> pageInfo = new PageInfo<>(pageNo, pageSize);
        pageInfo.setTotal((int) scheduleList.getTotal());
        pageInfo.setTotalList(scheduleList.getRecords());
        result.setData(pageInfo);
        putMsg(result, Status.SUCCESS);

        return result;
    }
    /**
     * update user info; a user may update their own account, an admin may
     * update anyone (enforced by hasPerm). Blank parameters leave the
     * corresponding field unchanged for name/password/email/timeZone.
     *
     * @param userId user id
     * @param userName user name
     * @param userPassword user password
     * @param email email
     * @param tenantId tenant id
     * @param phone phone
     * @param queue queue
     * @param state state (1 enabled, 0 disabled)
     * @param timeZone timeZone
     * @return update result code
     * @throws IOException when migrating the user's HDFS resources fails
     */
    @Override
    public Map<String, Object> updateUser(User loginUser, int userId,
                                          String userName,
                                          String userPassword,
                                          String email,
                                          int tenantId,
                                          String phone,
                                          String queue,
                                          int state,
                                          String timeZone) throws IOException {
        Map<String, Object> result = new HashMap<>();
        result.put(Constants.STATUS, false);

        // caller must be the user themself or an admin
        if (check(result, !hasPerm(loginUser, userId), Status.USER_NO_OPERATION_PERM)) {
            return result;
        }
        User user = userMapper.selectById(userId);
        if (user == null) {
            putMsg(result, Status.USER_NOT_EXIST, userId);
            return result;
        }
        if (StringUtils.isNotEmpty(userName)) {
            if (!CheckUtils.checkUserName(userName)) {
                putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName);
                return result;
            }
            // the new name must not collide with another user's name
            User tempUser = userMapper.queryByUserNameAccurately(userName);
            if (tempUser != null && tempUser.getId() != userId) {
                putMsg(result, Status.USER_NAME_EXIST);
                return result;
            }
            user.setUserName(userName);
        }

        if (StringUtils.isNotEmpty(userPassword)) {
            if (!CheckUtils.checkPassword(userPassword)) {
                putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userPassword);
                return result;
            }
            user.setUserPassword(EncryptionUtils.getMd5(userPassword));
        }

        if (StringUtils.isNotEmpty(email)) {
            if (!CheckUtils.checkEmail(email)) {
                putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, email);
                return result;
            }
            user.setEmail(email);
        }

        if (StringUtils.isNotEmpty(phone) && !CheckUtils.checkPhone(phone)) {
            putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, phone);
            return result;
        }

        // a user may not disable their own account
        if (state == 0 && user.getState() != state && loginUser.getId() == user.getId()) {
            putMsg(result, Status.NOT_ALLOW_TO_DISABLE_OWN_ACCOUNT);
            return result;
        }

        if (StringUtils.isNotEmpty(timeZone)) {
            if (!CheckUtils.checkTimeZone(timeZone)) {
                putMsg(result, Status.TIME_ZONE_ILLEGAL, timeZone);
                return result;
            }
            user.setTimeZone(timeZone);
        }

        user.setPhone(phone);
        user.setQueue(queue);
        user.setState(state);
        Date now = new Date();
        user.setUpdateTime(now);

        //if user switches the tenant, the user's resources need to be copied to the new tenant
        if (user.getTenantId() != tenantId) {
            Tenant oldTenant = tenantMapper.queryById(user.getTenantId());
            //query tenant
            Tenant newTenant = tenantMapper.queryById(tenantId);
            if (newTenant != null) {
                // only migrate HDFS content when resource upload is enabled
                if (PropertyUtils.getResUploadStartupState() && oldTenant != null) {
                    String newTenantCode = newTenant.getTenantCode();
                    String oldResourcePath = HadoopUtils.getHdfsResDir(oldTenant.getTenantCode());
                    String oldUdfsPath = HadoopUtils.getHdfsUdfDir(oldTenant.getTenantCode());

                    // if old tenant dir exists, copy the user's files/UDFs over
                    if (HadoopUtils.getInstance().exists(oldResourcePath)) {
                        String newResourcePath = HadoopUtils.getHdfsResDir(newTenantCode);
                        String newUdfsPath = HadoopUtils.getHdfsUdfDir(newTenantCode);

                        //file resources list
                        List<Resource> fileResourcesList = resourceMapper.queryResourceList(
                                null, userId, ResourceType.FILE.ordinal());
                        if (CollectionUtils.isNotEmpty(fileResourcesList)) {
                            ResourceTreeVisitor resourceTreeVisitor = new ResourceTreeVisitor(fileResourcesList);
                            ResourceComponent resourceComponent = resourceTreeVisitor.visit();
                            copyResourceFiles(resourceComponent, oldResourcePath, newResourcePath);
                        }

                        //udf resources
                        List<Resource> udfResourceList = resourceMapper.queryResourceList(
                                null, userId, ResourceType.UDF.ordinal());
                        if (CollectionUtils.isNotEmpty(udfResourceList)) {
                            ResourceTreeVisitor resourceTreeVisitor = new ResourceTreeVisitor(udfResourceList);
                            ResourceComponent resourceComponent = resourceTreeVisitor.visit();
                            copyResourceFiles(resourceComponent, oldUdfsPath, newUdfsPath);
                        }

                        //Delete the user from the old tenant directory
                        String oldUserPath = HadoopUtils.getHdfsUserDir(oldTenant.getTenantCode(), userId);
                        HadoopUtils.getInstance().delete(oldUserPath, true);
                    } else {
                        // if old tenant dir not exists , create
                        createTenantDirIfNotExists(oldTenant.getTenantCode());
                    }

                    if (HadoopUtils.getInstance().exists(HadoopUtils.getHdfsTenantDir(newTenant.getTenantCode()))) {
                        //create user in the new tenant directory
                        String newUserPath = HadoopUtils.getHdfsUserDir(newTenant.getTenantCode(), user.getId());
                        HadoopUtils.getInstance().mkdir(newUserPath);
                    } else {
                        // if new tenant dir not exists , create
                        createTenantDirIfNotExists(newTenant.getTenantCode());
                    }

                }
            }
            user.setTenantId(tenantId);
        }
        // persist the updated user
        userMapper.updateById(user);

        putMsg(result, Status.SUCCESS);
        return result;
    }
    /**
     * delete user; admin only. Refused when the user still owns projects
     * (ownership must be transferred first). Also removes the user's HDFS
     * directory and access tokens.
     *
     * @param loginUser login user (must be admin)
     * @param id user id
     * @return delete result code
     * @throws IOException exception when operate hdfs
     */
    @Override
    @Transactional(rollbackFor = RuntimeException.class)
    public Map<String, Object> deleteUserById(User loginUser, int id) throws IOException {
        Map<String, Object> result = new HashMap<>();
        //only admin can operate
        if (!isAdmin(loginUser)) {
            putMsg(result, Status.USER_NO_OPERATION_PERM, id);
            return result;
        }
        //check exist
        User tempUser = userMapper.selectById(id);
        if (tempUser == null) {
            putMsg(result, Status.USER_NOT_EXIST, id);
            return result;
        }
        // check if is a project owner; owned projects block deletion
        List<Project> projects = projectMapper.queryProjectCreatedByUser(id);
        if (CollectionUtils.isNotEmpty(projects)) {
            String projectNames = projects.stream().map(Project::getName).collect(Collectors.joining(","));
            putMsg(result, Status.TRANSFORM_PROJECT_OWNERSHIP, projectNames);
            return result;
        }
        // delete the user's HDFS directory when resource upload is enabled
        User user = userMapper.queryTenantCodeByUserId(id);

        if (user != null) {
            if (PropertyUtils.getResUploadStartupState()) {
                String userPath = HadoopUtils.getHdfsUserDir(user.getTenantCode(), id);
                if (HadoopUtils.getInstance().exists(userPath)) {
                    HadoopUtils.getInstance().delete(userPath, true);
                }
            }
        }

        // remove the user's tokens, then the user row itself
        accessTokenMapper.deleteAccessTokenByUserId(id);

        userMapper.deleteById(id);

        putMsg(result, Status.SUCCESS);

        return result;
    }
/**
* grant project
*
* @param loginUser login user
* @param userId user id
* @param projectIds project id array
* @return grant result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> grantProject(User loginUser, int userId, String projectIds) {
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
//check exist
User tempUser = userMapper.selectById(userId);
if (tempUser == null) {
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
//if the selected projectIds are empty, delete all items associated with the user
if (check(result, StringUtils.isEmpty(projectIds), Status.SUCCESS)) {
projectUserMapper.deleteProjectRelation(0, userId);
return result;
}
String[] projectIdArr = projectIds.split(",");
for (String projectId : projectIdArr) {
Date now = new Date();
ProjectUser projectUser = new ProjectUser();
projectUser.setUserId(userId);
projectUser.setProjectId(Integer.parseInt(projectId));
projectUser.setPerm(Constants.AUTHORIZE_WRITABLE_PERM);
projectUser.setCreateTime(now);
projectUser.setUpdateTime(now);
projectUserMapper.insert(projectUser);
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* grant project by code
*
* @param loginUser login user
* @param userId user id
* @param projectCode project code
* @return grant result code
*/
@Override
public Map<String, Object> grantProjectByCode(final User loginUser, final int userId, final long projectCode) {
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
// 1. check if user is existed
User tempUser = this.userMapper.selectById(userId);
if (tempUser == null) {
this.putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
// 2. check if project is existed
Project project = this.projectMapper.queryByCode(projectCode);
if (project == null) {
this.putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
return result;
}
// 3. only project owner can operate
if (!this.hasPerm(loginUser, project.getUserId())) {
this.putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
// 4. maintain the relationship between project and user
final Date today = new Date();
ProjectUser projectUser = new ProjectUser();
projectUser.setUserId(userId);
projectUser.setProjectId(project.getId());
projectUser.setPerm(7);
projectUser.setCreateTime(today);
projectUser.setUpdateTime(today);
this.projectUserMapper.insert(projectUser);
this.putMsg(result, Status.SUCCESS);
return result;
}
    /**
     * revoke the project permission for specified user; admin only.
     * @param loginUser Login user (must be admin)
     * @param userId User id
     * @param projectCode Project Code
     * @return revoke result code
     */
    @Override
    public Map<String, Object> revokeProject(User loginUser, int userId, long projectCode) {
        Map<String, Object> result = new HashMap<>();
        result.put(Constants.STATUS, false);

        // 1. only admin can operate
        if (this.check(result, !this.isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
            return result;
        }

        // 2. check if user is existed
        User user = this.userMapper.selectById(userId);
        if (user == null) {
            this.putMsg(result, Status.USER_NOT_EXIST, userId);
            return result;
        }

        // 3. check if project is existed
        Project project = this.projectMapper.queryByCode(projectCode);
        if (project == null) {
            this.putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
            return result;
        }

        // 4. delete the relationship row between project and user
        this.projectUserMapper.deleteProjectRelation(project.getId(), user.getId());
        this.putMsg(result, Status.SUCCESS);
        return result;
    }
    /**
     * grant resources to a user, replacing previous grants. Revoking a
     * resource is refused when it is referenced by an online process
     * definition.
     *
     * @param loginUser login user
     * @param userId user id
     * @param resourceIds resource full-id strings ("1-2-3", comma-separated);
     *                    every ancestor id in the path is authorized too
     * @return grant result code
     */
    @Override
    @Transactional(rollbackFor = RuntimeException.class)
    public Map<String, Object> grantResources(User loginUser, int userId, String resourceIds) {
        Map<String, Object> result = new HashMap<>();
        User user = userMapper.selectById(userId);
        if (user == null) {
            putMsg(result, Status.USER_NOT_EXIST, userId);
            return result;
        }

        Set<Integer> needAuthorizeResIds = new HashSet<>();
        if (StringUtils.isNotBlank(resourceIds)) {
            String[] resourceFullIdArr = resourceIds.split(",");
            // need authorize resource id set: expand each "a-b-c" full id so
            // that every ancestor directory id is authorized as well
            for (String resourceFullId : resourceFullIdArr) {
                String[] resourceIdArr = resourceFullId.split("-");
                for (int i = 0; i <= resourceIdArr.length - 1; i++) {
                    int resourceIdValue = Integer.parseInt(resourceIdArr[i]);
                    needAuthorizeResIds.add(resourceIdValue);
                }
            }
        }

        //get the authorized resource id list by user id
        List<Integer> resIds = resourceUserMapper.queryResourcesIdListByUserIdAndPerm(userId, Constants.AUTHORIZE_WRITABLE_PERM);
        List<Resource> oldAuthorizedRes = CollectionUtils.isEmpty(resIds) ? new ArrayList<>() : resourceMapper.queryResourceListById(resIds);
        //if resource type is UDF,need check whether it is bound by UDF function
        Set<Integer> oldAuthorizedResIds = oldAuthorizedRes.stream().map(Resource::getId).collect(Collectors.toSet());

        //get the unauthorized resource id list: previously granted ids that
        //are about to be revoked by this call
        oldAuthorizedResIds.removeAll(needAuthorizeResIds);

        if (CollectionUtils.isNotEmpty(oldAuthorizedResIds)) {

            // get all resource id of process definitions those is released;
            // a resource still used by a process definition cannot be revoked
            List<Map<String, Object>> list = processDefinitionMapper.listResourcesByUser(userId);
            Map<Integer, Set<Long>> resourceProcessMap = ResourceProcessDefinitionUtils.getResourceProcessDefinitionMap(list);
            Set<Integer> resourceIdSet = resourceProcessMap.keySet();

            resourceIdSet.retainAll(oldAuthorizedResIds);
            if (CollectionUtils.isNotEmpty(resourceIdSet)) {
                logger.error("can't be deleted,because it is used of process definition");
                for (Integer resId : resourceIdSet) {
                    logger.error("resource id:{} is used of process definition {}", resId, resourceProcessMap.get(resId));
                }
                putMsg(result, Status.RESOURCE_IS_USED);
                return result;
            }

        }

        // full replacement: drop all existing grants, then re-insert
        resourceUserMapper.deleteResourceUser(userId, 0);

        if (check(result, StringUtils.isEmpty(resourceIds), Status.SUCCESS)) {
            return result;
        }

        for (int resourceIdValue : needAuthorizeResIds) {
            Resource resource = resourceMapper.selectById(resourceIdValue);
            if (resource == null) {
                putMsg(result, Status.RESOURCE_NOT_EXIST);
                return result;
            }

            Date now = new Date();
            ResourcesUser resourcesUser = new ResourcesUser();
            resourcesUser.setUserId(userId);
            resourcesUser.setResourcesId(resourceIdValue);
            // directories get read-only permission; files get write permission
            if (resource.isDirectory()) {
                resourcesUser.setPerm(Constants.AUTHORIZE_READABLE_PERM);
            } else {
                resourcesUser.setPerm(Constants.AUTHORIZE_WRITABLE_PERM);
            }

            resourcesUser.setCreateTime(now);
            resourcesUser.setUpdateTime(now);
            resourceUserMapper.insert(resourcesUser);

        }

        putMsg(result, Status.SUCCESS);

        return result;
    }
/**
 * Grant UDF functions to a user. All previous UDF grants for the user are
 * removed first, then one writable grant is inserted per requested id.
 *
 * @param loginUser login user
 * @param userId    id of the user receiving the grants
 * @param udfIds    comma-separated udf function ids; blank revokes everything
 * @return result map carrying the status code
 */
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> grantUDFFunction(User loginUser, int userId, String udfIds) {
    Map<String, Object> result = new HashMap<>();
    User user = userMapper.selectById(userId);
    if (user == null) {
        putMsg(result, Status.USER_NOT_EXIST, userId);
        return result;
    }
    // drop every existing grant before re-inserting the requested set
    udfUserMapper.deleteByUserId(userId);
    if (check(result, StringUtils.isEmpty(udfIds), Status.SUCCESS)) {
        return result;
    }
    for (String udfId : udfIds.split(",")) {
        Date now = new Date();
        UDFUser udfUser = new UDFUser();
        udfUser.setUserId(userId);
        udfUser.setUdfId(Integer.parseInt(udfId));
        udfUser.setPerm(Constants.AUTHORIZE_WRITABLE_PERM);
        udfUser.setCreateTime(now);
        udfUser.setUpdateTime(now);
        udfUserMapper.insert(udfUser);
    }
    putMsg(result, Status.SUCCESS);
    return result;
}
/**
 * Grant data sources to a user. Existing grants are cleared first, then a
 * writable grant is inserted for each requested data source id.
 *
 * @param loginUser     login user
 * @param userId        id of the user receiving the grants
 * @param datasourceIds comma-separated data source ids; blank revokes all
 * @return result map carrying the status code
 */
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> grantDataSource(User loginUser, int userId, String datasourceIds) {
    Map<String, Object> result = new HashMap<>();
    result.put(Constants.STATUS, false);
    User user = userMapper.selectById(userId);
    if (user == null) {
        putMsg(result, Status.USER_NOT_EXIST, userId);
        return result;
    }
    // remove every existing grant before inserting the new set
    datasourceUserMapper.deleteByUserId(userId);
    if (check(result, StringUtils.isEmpty(datasourceIds), Status.SUCCESS)) {
        return result;
    }
    for (String datasourceId : datasourceIds.split(",")) {
        Date now = new Date();
        DatasourceUser datasourceUser = new DatasourceUser();
        datasourceUser.setUserId(userId);
        datasourceUser.setDatasourceId(Integer.parseInt(datasourceId));
        datasourceUser.setPerm(Constants.AUTHORIZE_WRITABLE_PERM);
        datasourceUser.setCreateTime(now);
        datasourceUser.setUpdateTime(now);
        datasourceUserMapper.insert(datasourceUser);
    }
    putMsg(result, Status.SUCCESS);
    return result;
}
/**
 * query user info
 *
 * For an admin the login user object is returned as-is; for a general user
 * the persisted details are reloaded and the names of the user's alert
 * groups are joined into a comma-separated string on the user object.
 *
 * @param loginUser login user
 * @return result map whose data entry is the user info
 */
@Override
public Map<String, Object> getUserInfo(User loginUser) {
    Map<String, Object> result = new HashMap<>();
    User user = null;
    if (loginUser.getUserType() == UserType.ADMIN_USER) {
        user = loginUser;
    } else {
        user = userMapper.queryDetailsById(loginUser.getId());
        List<AlertGroup> alertGroups = alertGroupMapper.queryByUserId(loginUser.getId());
        if (alertGroups != null && !alertGroups.isEmpty()) {
            // BUGFIX: the last entry used to be appended as the AlertGroup
            // object itself (its toString()) instead of its group name,
            // producing output like "a,b,AlertGroup@1a2b3c".
            String groupNames = alertGroups.stream()
                    .map(AlertGroup::getGroupName)
                    .collect(Collectors.joining(","));
            user.setAlertGroup(groupNames);
        }
    }
    result.put(Constants.DATA_LIST, user);
    putMsg(result, Status.SUCCESS);
    return result;
}
/**
 * List every general (non-admin) user. Admin-only.
 *
 * @param loginUser login user
 * @return result map whose data entry is the general-user list
 */
@Override
public Map<String, Object> queryAllGeneralUsers(User loginUser) {
    Map<String, Object> result = new HashMap<>();
    // only an admin may list users
    if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
        return result;
    }
    result.put(Constants.DATA_LIST, userMapper.queryAllGeneralUser());
    putMsg(result, Status.SUCCESS);
    return result;
}
/**
 * List every user in the system (admins included). Admin-only.
 *
 * @param loginUser login user
 * @return result map whose data entry is the full user list
 */
@Override
public Map<String, Object> queryUserList(User loginUser) {
    Map<String, Object> result = new HashMap<>();
    // only an admin may list users
    if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
        return result;
    }
    result.put(Constants.DATA_LIST, userMapper.selectList(null));
    putMsg(result, Status.SUCCESS);
    return result;
}
/**
 * Check whether a user name is still free.
 *
 * @param userName candidate user name
 * @return SUCCESS when the name is unused, USER_NAME_EXIST otherwise
 */
@Override
public Result<Object> verifyUserName(String userName) {
    Result<Object> result = new Result<>();
    // an exact-match lookup returning a row means the name is taken
    boolean taken = userMapper.queryByUserNameAccurately(userName) != null;
    putMsg(result, taken ? Status.USER_NAME_EXIST : Status.SUCCESS);
    return result;
}
/**
 * List the users NOT yet bound to the given alert group. Admin-only.
 *
 * @param loginUser    login user
 * @param alertgroupId alert group id
 * @return result map whose data entry is the unauthorized-user list
 */
@Override
public Map<String, Object> unauthorizedUser(User loginUser, Integer alertgroupId) {
    Map<String, Object> result = new HashMap<>();
    //only admin can operate
    if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
        return result;
    }
    List<User> allUsers = userMapper.selectList(null);
    List<User> unauthorized = new ArrayList<>();
    if (allUsers != null && !allUsers.isEmpty()) {
        // start from everyone, then subtract the already-authorized set
        Set<User> candidates = new HashSet<>(allUsers);
        List<User> authedUserList = userMapper.queryUserListByAlertGroupId(alertgroupId);
        if (authedUserList != null && !authedUserList.isEmpty()) {
            candidates.removeAll(new HashSet<>(authedUserList));
        }
        unauthorized = new ArrayList<>(candidates);
    }
    result.put(Constants.DATA_LIST, unauthorized);
    putMsg(result, Status.SUCCESS);
    return result;
}
/**
 * List the users already bound to the given alert group. Admin-only.
 *
 * @param loginUser    login user
 * @param alertgroupId alert group id
 * @return result map whose data entry is the authorized-user list
 */
@Override
public Map<String, Object> authorizedUser(User loginUser, Integer alertgroupId) {
    Map<String, Object> result = new HashMap<>();
    // only an admin may query authorizations
    if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
        return result;
    }
    result.put(Constants.DATA_LIST, userMapper.queryUserListByAlertGroupId(alertgroupId));
    putMsg(result, Status.SUCCESS);
    return result;
}
/**
 * @param tenantId tenant id
 * @return true when a tenant row with this id exists, false otherwise
 */
private boolean checkTenantExists(int tenantId) {
    // a null lookup result means the tenant is unknown
    return null != tenantMapper.queryById(tenantId);
}
/**
 * Validate the basic user fields in order: name, password, email, phone.
 *
 * @return the first field value that fails validation, or null when all pass
 */
private String checkUserParams(String userName, String password, String email, String phone) {
    if (!CheckUtils.checkUserName(userName)) {
        return userName;
    }
    if (!CheckUtils.checkPassword(password)) {
        return password;
    }
    if (!CheckUtils.checkEmail(email)) {
        return email;
    }
    if (!CheckUtils.checkPhone(phone)) {
        return phone;
    }
    return null;
}
/**
 * copy resource files
 *
 * Recursively mirrors the children of {@code resourceComponent} from
 * {@code srcBasePath} to {@code dstBasePath} on HDFS: files are copied,
 * empty directories are created, non-empty directories recurse.
 *
 * @param resourceComponent resource component
 * @param srcBasePath src base path
 * @param dstBasePath dst base path
 * @throws IOException io exception
 */
private void copyResourceFiles(ResourceComponent resourceComponent, String srcBasePath, String dstBasePath) throws IOException {
List<ResourceComponent> components = resourceComponent.getChildren();
if (CollectionUtils.isNotEmpty(components)) {
for (ResourceComponent component : components) {
// verify whether exist
if (!HadoopUtils.getInstance().exists(String.format("%s/%s", srcBasePath, component.getFullName()))) {
logger.error("resource file: {} not exist,copy error", component.getFullName());
throw new ServiceException(Status.RESOURCE_NOT_EXIST);
}
if (!component.isDirctory()) {
// copy it to dst
HadoopUtils.getInstance().copy(String.format("%s/%s", srcBasePath, component.getFullName()), String.format("%s/%s", dstBasePath, component.getFullName()), false, true);
continue;
}
if (CollectionUtils.isEmpty(component.getChildren())) {
// if not exist,need create it
// leaf directory: just make sure it exists on the destination side
if (!HadoopUtils.getInstance().exists(String.format("%s/%s", dstBasePath, component.getFullName()))) {
HadoopUtils.getInstance().mkdir(String.format("%s/%s", dstBasePath, component.getFullName()));
}
} else {
// directory with children: recurse one level deeper
copyResourceFiles(component, srcBasePath, dstBasePath);
}
}
}
}
/**
 * Self-service registration: creates a user with state 0 (inactive),
 * default tenant_id 1, no phone and no queue.
 *
 * @param userName       user name
 * @param userPassword   password
 * @param repeatPassword password confirmation, must match userPassword
 * @param email          email address
 * @return result map whose data entry is the created user
 */
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> registerUser(String userName, String userPassword, String repeatPassword, String email) {
    Map<String, Object> result = new HashMap<>();
    // validate name/password/email format first
    String invalidField = this.checkUserParams(userName, userPassword, email, "");
    if (!StringUtils.isEmpty(invalidField)) {
        putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, invalidField);
        return result;
    }
    if (!userPassword.equals(repeatPassword)) {
        putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "two passwords are not same");
        return result;
    }
    User user = createUser(userName, userPassword, email, 1, "", "", Flag.NO.ordinal());
    putMsg(result, Status.SUCCESS);
    result.put(Constants.DATA_LIST, user);
    return result;
}
/**
 * Activate a registered user (state 0 -> 1). Admin-only.
 *
 * @param loginUser login user
 * @param userName  name of the user to activate
 * @return result map whose data entry is the activated user
 */
@Override
public Map<String, Object> activateUser(User loginUser, String userName) {
    Map<String, Object> result = new HashMap<>();
    result.put(Constants.STATUS, false);
    if (!isAdmin(loginUser)) {
        putMsg(result, Status.USER_NO_OPERATION_PERM);
        return result;
    }
    if (!CheckUtils.checkUserName(userName)) {
        putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName);
        return result;
    }
    User target = userMapper.queryByUserNameAccurately(userName);
    if (target == null) {
        putMsg(result, Status.USER_NOT_EXIST, userName);
        return result;
    }
    // only users still in the "registered but inactive" state may be activated
    if (target.getState() != Flag.NO.ordinal()) {
        putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName);
        return result;
    }
    target.setState(Flag.YES.ordinal());
    target.setUpdateTime(new Date());
    userMapper.updateById(target);
    // re-read so the caller sees the persisted row
    User responseUser = userMapper.queryByUserNameAccurately(userName);
    putMsg(result, Status.SUCCESS);
    result.put(Constants.DATA_LIST, responseUser);
    return result;
}
/**
 * activate user, only system admin have permission, change users state code 0 to 1
 *
 * Delegates to {@link #activateUser} for each name and aggregates the
 * outcomes into a "success" / "failed" summary map.
 *
 * @param loginUser login user
 * @param userNames user name
 * @return create result code
 */
@Override
public Map<String, Object> batchActivateUser(User loginUser, List<String> userNames) {
Map<String, Object> result = new HashMap<>();
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
int totalSuccess = 0;
List<String> successUserNames = new ArrayList<>();
Map<String, Object> successRes = new HashMap<>();
int totalFailed = 0;
List<Map<String, String>> failedInfo = new ArrayList<>();
Map<String, Object> failedRes = new HashMap<>();
for (String userName : userNames) {
// reuse the single-user activation; its status tells success/failure
Map<String, Object> tmpResult = activateUser(loginUser, userName);
if (tmpResult.get(Constants.STATUS) != Status.SUCCESS) {
totalFailed++;
Map<String, String> failedBody = new HashMap<>();
failedBody.put("userName", userName);
Status status = (Status) tmpResult.get(Constants.STATUS);
// render the per-user error message with the name substituted in
String errorMessage = MessageFormat.format(status.getMsg(), userName);
failedBody.put("msg", errorMessage);
failedInfo.add(failedBody);
} else {
totalSuccess++;
successUserNames.add(userName);
}
}
// summary payload: success -> {sum, userName[]}, failed -> {sum, info[]}
successRes.put("sum", totalSuccess);
successRes.put("userName", successUserNames);
failedRes.put("sum", totalFailed);
failedRes.put("info", failedInfo);
Map<String, Object> res = new HashMap<>();
res.put("success", successRes);
res.put("failed", failedRes);
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, res);
return result;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,803 | [Bug][UI Next][V1.0.0-Alpha] An error occurred while opening the task definition page. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
An error occurred while opening the task definition page.
<img width="1822" alt="image" src="https://user-images.githubusercontent.com/97265214/157634610-e824671c-b822-4f2b-8f9d-95d790dddc5f.png">
### What you expected to happen
No error.
### How to reproduce
Open the task definition page
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8803 | https://github.com/apache/dolphinscheduler/pull/8814 | f5711e4fcd744cdab8aefcf8f08a4a951481b83b | 53fcffedb6108930fa32dad854826a5de319858a | "2022-03-10T06:23:59Z" | java | "2022-03-10T11:27:37Z" | dolphinscheduler-ui-next/src/views/projects/task/definition/components/version-modal.tsx | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { defineComponent, onMounted, PropType, toRefs, watch } from 'vue'
import Modal from '@/components/modal'
import { NDataTable, NPagination } from 'naive-ui'
import { useI18n } from 'vue-i18n'
import { useVersion } from './use-version'
import styles from './version.module.scss'
const props = {
  show: {
    type: Boolean as PropType<boolean>,
    default: false
  },
  // Row of the task-definition table whose version history is shown.
  row: {
    type: Object as PropType<any>,
    // Vue 3 requires object/array prop defaults to be factory functions;
    // a bare `{}` literal triggers a dev warning and would be shared
    // across component instances.
    default: () => ({})
  }
}
// Modal listing the version history of one task definition.
// Table data is (re)loaded on mount and every time the modal is shown;
// a `refresh` event is bubbled up after a version operation succeeds.
const VersionModal = defineComponent({
name: 'VersionModal',
props,
emits: ['confirm', 'refresh'],
setup(props, ctx) {
const { t } = useI18n()
const { variables, getTableData, createColumns } = useVersion()
// Fetch the current page of versions for the task held in `variables`.
const requestData = () => {
getTableData({
pageSize: variables.pageSize,
pageNo: variables.page
})
}
onMounted(() => {
variables.taskVersion = props.row?.taskVersion
variables.taskCode = props.row?.taskCode
createColumns(variables)
requestData()
})
// Re-sync task identity and reload whenever the modal is (re)opened.
watch(
() => props.show,
() => {
variables.taskVersion = props.row?.taskVersion
variables.taskCode = props.row?.taskCode
if (props.show) {
createColumns(variables)
requestData()
}
}
)
// After a version switch/delete inside the table, tell the parent list
// to refresh, then clear the flag so the watcher can fire again.
watch(
() => variables.refreshTaskDefinition,
() => {
if (variables.refreshTaskDefinition) {
ctx.emit('refresh')
variables.refreshTaskDefinition = false
}
}
)
const onConfirm = () => {
ctx.emit('confirm')
}
return { t, ...toRefs(variables), requestData, onConfirm }
},
render() {
const { t, requestData, onConfirm, show } = this
return (
<Modal
show={show}
title={t('project.task.version')}
cancelShow={false}
onConfirm={onConfirm}
>
<NDataTable columns={this.columns} data={this.tableData} />
<div class={styles.pagination}>
<NPagination
v-model:page={this.page}
v-model:page-size={this.pageSize}
page-count={this.totalPage}
onUpdatePage={requestData}
/>
</div>
</Modal>
)
}
})
export default VersionModal
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,778 | [Bug-FE][UI Next][V1.0.0-Alpha]The letters overlap with the input field | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
The letters overlap with the input field
<img width="1908" alt="image" src="https://user-images.githubusercontent.com/76080484/157419190-4f8aefed-7c03-4b47-9d6a-56e84ec8bdd4.png">
### What you expected to happen
There should be some space between the letters and the input field
### How to reproduce
When saving the workflow
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8778 | https://github.com/apache/dolphinscheduler/pull/8821 | 20dd0c12ce0c723fe1f8a0ffd84017fff7d0db1c | 939f280e7152f2f93f88cfb97f6952a8d87c16c4 | "2022-03-09T10:12:58Z" | java | "2022-03-10T14:10:03Z" | dolphinscheduler-ui-next/src/views/projects/workflow/components/dag/dag-save-modal.tsx | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { defineComponent, PropType, ref, computed, onMounted, watch } from 'vue'
import Modal from '@/components/modal'
import { useI18n } from 'vue-i18n'
import {
NForm,
NFormItem,
NInput,
NSelect,
NSwitch,
NInputNumber,
NDynamicInput,
NCheckbox
} from 'naive-ui'
import { queryTenantList } from '@/service/modules/tenants'
import { SaveForm, WorkflowDefinition } from './types'
import { useRoute } from 'vue-router'
import { verifyName } from '@/service/modules/process-definition'
import './x6-style.scss'
import { positiveIntegerRegex } from '@/utils/regex'
const props = {
// whether the save modal is shown
visible: {
type: Boolean as PropType<boolean>,
default: false
},
// If this prop is passed, it means from definition detail
// (editing an existing workflow); undefined means creating a new one.
definition: {
type: Object as PropType<WorkflowDefinition>,
default: undefined
}
}
// Minimal tenant shape consumed from the queryTenantList response.
interface Tenant {
tenantCode: string
id: number
}
// Save-workflow modal: collects basic workflow info (name, description,
// tenant, timeout, global params) and emits `save` with the form value.
// When the `definition` prop is set, the form is pre-filled for editing.
export default defineComponent({
name: 'dag-save-modal',
props,
emits: ['update:show', 'save'],
setup(props, context) {
const route = useRoute()
const { t } = useI18n()
const projectCode = Number(route.params.projectCode)
const tenants = ref<Tenant[]>([])
// Select options built from the fetched tenants, plus a trailing
// 'default' entry so the form always has a selectable value.
const tenantsDropdown = computed(() => {
if (tenants.value) {
return tenants.value
.map((t) => ({
label: t.tenantCode,
value: t.tenantCode
}))
.concat({ label: 'default', value: 'default' })
}
return []
})
onMounted(() => {
queryTenantList().then((res: any) => {
tenants.value = res
})
})
const formValue = ref<SaveForm>({
name: '',
description: '',
tenantCode: 'default',
timeoutFlag: false,
timeout: 0,
globalParams: [],
release: false
})
const formRef = ref()
const rule = {
name: {
required: true,
message: t('project.dag.dag_name_empty')
},
// timeout must be a positive integer when the timeout flag is on
timeout: {
validator() {
if (
formValue.value.timeoutFlag &&
!positiveIntegerRegex.test(String(formValue.value.timeout))
) {
return new Error(t('project.dag.positive_integer'))
}
}
},
// global params must be non-empty and unique.
// NOTE(review): this checks `param.value`, but the error messages talk
// about the prop/key and updateModalData maps keys into `param.key` --
// confirm whether `param.key` was intended here.
globalParams: {
validator() {
const props = new Set()
for (const param of formValue.value.globalParams) {
const prop = param.value
if (!prop) {
return new Error(t('project.dag.prop_empty'))
}
if (props.has(prop)) {
return new Error(t('project.dag.prop_repeat'))
}
props.add(prop)
}
}
}
}
const onSubmit = () => {
// naive-ui's validate callback receives the validation errors as its
// argument, so a falsy `valid` actually means "no errors" -- the
// misleading name is kept as-is.
formRef.value.validate(async (valid: any) => {
if (!valid) {
const params = {
name: formValue.value.name
}
// only re-check name uniqueness when the name actually changed
if (
props.definition?.processDefinition.name !== formValue.value.name
) {
verifyName(params, projectCode)
.then(() => context.emit('save', formValue.value))
} else {
context.emit('save', formValue.value)
}
}
})
}
const onCancel = () => {
context.emit('update:show', false)
}
// Pre-fill the form from an existing workflow definition (edit mode).
const updateModalData = () => {
const process = props.definition?.processDefinition
if (process) {
formValue.value.name = process.name
formValue.value.description = process.description
formValue.value.tenantCode = process.tenantCode || 'default'
if (process.timeout && process.timeout > 0) {
formValue.value.timeoutFlag = true
formValue.value.timeout = process.timeout
}
formValue.value.globalParams = process.globalParamList.map((param) => ({
key: param.prop,
value: param.value
}))
}
}
onMounted(() => updateModalData())
watch(
() => props.definition?.processDefinition,
() => updateModalData()
)
// NOTE(review): the NInputNumber suffix below is the hard-coded Chinese
// character '分' (minute); it should presumably come from i18n -- confirm.
return () => (
<Modal
show={props.visible}
title={t('project.dag.basic_info')}
onConfirm={onSubmit}
onCancel={onCancel}
autoFocus={false}
>
<NForm
label-width='100'
model={formValue.value}
rules={rule}
size='medium'
label-placement='left'
ref={formRef}
>
<NFormItem label={t('project.dag.workflow_name')} path='name'>
<NInput v-model:value={formValue.value.name} />
</NFormItem>
<NFormItem label={t('project.dag.description')} path='description'>
<NInput
type='textarea'
v-model:value={formValue.value.description}
/>
</NFormItem>
<NFormItem label={t('project.dag.tenant')} path='tenantCode'>
<NSelect
options={tenantsDropdown.value}
v-model:value={formValue.value.tenantCode}
/>
</NFormItem>
<NFormItem label={t('project.dag.timeout_alert')} path='timeoutFlag'>
<NSwitch v-model:value={formValue.value.timeoutFlag} />
</NFormItem>
{formValue.value.timeoutFlag && (
<NFormItem label=' ' path='timeout'>
<NInputNumber
v-model:value={formValue.value.timeout}
show-button={false}
min={0}
v-slots={{
suffix: () => '分'
}}
></NInputNumber>
</NFormItem>
)}
<NFormItem
label={t('project.dag.global_variables')}
path='globalParams'
>
<NDynamicInput
v-model:value={formValue.value.globalParams}
preset='pair'
key-placeholder={t('project.dag.key')}
value-placeholder={t('project.dag.value')}
/>
</NFormItem>
{props.definition && (
<NFormItem label=' ' path='timeoutFlag'>
<NCheckbox v-model:checked={formValue.value.release}>
{t('project.dag.online_directly')}
</NCheckbox>
</NFormItem>
)}
</NForm>
</Modal>
)
}
})
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,809 | [Bug-FE][UI Next][V1.0.0-Alpha] DATA_QUALITY icon is null | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
DATA_QUALITY icon is null
<img width="1902" alt="image" src="https://user-images.githubusercontent.com/76080484/157627147-8fab0a8a-8854-42b1-97f5-71ad7bdd3132.png">
### What you expected to happen
The DATA_QUALITY icon should display normally.
### How to reproduce
Click Create Workflow
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8809 | https://github.com/apache/dolphinscheduler/pull/8822 | 939f280e7152f2f93f88cfb97f6952a8d87c16c4 | a9c9eb00d09722069f02c56cd5974178894d22e4 | "2022-03-10T08:59:03Z" | java | "2022-03-11T02:00:30Z" | dolphinscheduler-ui-next/src/assets/images/task-icons/data_quality.png | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,809 | [Bug-FE][UI Next][V1.0.0-Alpha] DATA_QUALITY icon is null | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
DATA_QUALITY icon is null
<img width="1902" alt="image" src="https://user-images.githubusercontent.com/76080484/157627147-8fab0a8a-8854-42b1-97f5-71ad7bdd3132.png">
### What you expected to happen
The DATA_QUALITY icon should display normally.
### How to reproduce
Click Create Workflow
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8809 | https://github.com/apache/dolphinscheduler/pull/8822 | 939f280e7152f2f93f88cfb97f6952a8d87c16c4 | a9c9eb00d09722069f02c56cd5974178894d22e4 | "2022-03-10T08:59:03Z" | java | "2022-03-11T02:00:30Z" | dolphinscheduler-ui-next/src/assets/images/task-icons/data_quality_hover.png | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,809 | [Bug-FE][UI Next][V1.0.0-Alpha] DATA_QUALITY icon is null | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
DATA_QUALITY icon is null
<img width="1902" alt="image" src="https://user-images.githubusercontent.com/76080484/157627147-8fab0a8a-8854-42b1-97f5-71ad7bdd3132.png">
### What you expected to happen
The DATA_QUALITY icon should display normally.
### How to reproduce
Click Create Workflow
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8809 | https://github.com/apache/dolphinscheduler/pull/8822 | 939f280e7152f2f93f88cfb97f6952a8d87c16c4 | a9c9eb00d09722069f02c56cd5974178894d22e4 | "2022-03-10T08:59:03Z" | java | "2022-03-11T02:00:30Z" | dolphinscheduler-ui-next/src/views/projects/workflow/components/dag/dag-sidebar.tsx | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { defineComponent } from 'vue'
import {
TaskType,
TASK_TYPES_MAP
} from '@/views/projects/task/constants/task-type'
import Styles from './dag.module.scss'
// Sidebar listing every draggable task type for the workflow DAG editor.
// Dragging an entry emits `dragStart` with the native event and task type.
export default defineComponent({
  name: 'workflow-dag-sidebar',
  emits: ['dragStart'],
  setup(props, context) {
    // Flatten the task-type map into a list of { type, ...meta } entries.
    const taskTypes = Object.entries(TASK_TYPES_MAP).map(([type, meta]) => ({
      type,
      ...meta
    }))
    return () => (
      <div class={Styles.sidebar}>
        {taskTypes.map((task) => (
          <div
            class={Styles.draggable}
            draggable='true'
            onDragstart={(e) => {
              context.emit('dragStart', e, task.type as TaskType)
            }}
          >
            <em
              class={[
                Styles['sidebar-icon'],
                Styles['icon-' + task.type.toLocaleLowerCase()]
              ]}
            ></em>
            <span>{task.alias}</span>
          </div>
        ))}
      </div>
    )
  }
})
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,809 | [Bug-FE][UI Next][V1.0.0-Alpha] DATA_QUALITY icon is null | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
DATA_QUALITY icon is null
<img width="1902" alt="image" src="https://user-images.githubusercontent.com/76080484/157627147-8fab0a8a-8854-42b1-97f5-71ad7bdd3132.png">
### What you expected to happen
The DATA_QUALITY icon should display normally.
### How to reproduce
Click Create Workflow
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8809 | https://github.com/apache/dolphinscheduler/pull/8822 | 939f280e7152f2f93f88cfb97f6952a8d87c16c4 | a9c9eb00d09722069f02c56cd5974178894d22e4 | "2022-03-10T08:59:03Z" | java | "2022-03-11T02:00:30Z" | dolphinscheduler-ui-next/src/views/projects/workflow/components/dag/dag.module.scss | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Theme palette and shared metrics for the DAG editor.
$blue: #288fff;
$blueBg: rgba(40, 143, 255, 0.1);
$toolbarHeight: 50px;
$borderDark: rgba(255, 255, 255, 0.09);
$borderLight: rgb(239, 239, 245);
$bgDark: rgb(24, 24, 28);
$bgLight: #ffffff;
// Root container of the whole DAG editor view.
.dag {
  height: 100%;
  overflow: hidden;
}
// Main area below the toolbar: sidebar + canvas side by side.
.content {
  display: flex;
  height: calc(100% - $toolbarHeight - 20px);
  margin-top: 20px;
}
.toolbar {
  height: $toolbarHeight;
  display: flex;
  align-items: center;
  padding: 0 20px;
  border-radius: 4px;
  justify-content: space-between;
}
// Theme-dependent toolbar borders.
.dag-dark .toolbar {
  border: 1px solid $borderDark;
}
// NOTE(review): selector targets `.toolbar-light`, but the component only
// applies `.toolbar` — the light-theme border may never render. Confirm
// against dag.tsx before changing.
.dag-light .toolbar-light {
  border: 1px solid $borderLight;
}
// Drawing surface that hosts the X6 graph.
.canvas {
  width: 100%;
  height: 100%;
  position: relative;
  overflow: hidden;
  display: flex;
  flex: 1;
}
.paper {
  width: 100%;
  height: 100%;
}
// Left-hand task-type palette; scrolls independently of the canvas.
.sidebar {
  width: 190px;
  height: 100%;
  margin-right: 20px;
  overflow-y: scroll;
}
.workflow-name {
  font-size: 14px;
  font-weight: 700;
}
.toolbar-btn {
  margin-left: 5px;
}
// Task types that have a matching pair of icon assets
// (<type>.png / <type>_hover.png) under assets/images/task-icons/.
// `data_quality` is included so the DATA_QUALITY task shows an icon in the
// sidebar instead of an empty placeholder (issue #8809).
// NOTE: assumes data_quality.png / data_quality_hover.png exist alongside
// the other icons — verify the assets are committed.
$taskIconTypes: shell, sub_process, procedure, sql, flink, mr, python,
  dependent, http, datax, pigeon, sqoop, conditions, seatunnel, spark,
  switch, emr, data_quality;

// One draggable entry per task type in the sidebar palette.
.draggable {
  display: flex;
  width: 100%;
  height: 32px;
  margin-bottom: 10px;
  align-items: center;
  padding: 0 10px;
  border-radius: 4px;
  transform: translate(0, 0);
  box-sizing: border-box;
  cursor: move;
  font-size: 12px;
  .sidebar-icon {
    display: block;
    width: 18px;
    height: 18px;
    background-size: 100% 100%;
    margin-right: 10px;
    // Generate one `.icon-<type>` rule per task type instead of
    // hand-writing 18 near-identical rules; the list above is the single
    // place to extend when a new task type is added.
    @each $type in $taskIconTypes {
      &.icon-#{$type} {
        background-image: url('@/assets/images/task-icons/#{$type}.png');
      }
    }
  }
  &:hover {
    .sidebar-icon {
      // Swap to the highlighted variant of the same icon on hover.
      @each $type in $taskIconTypes {
        &.icon-#{$type} {
          background-image: url('@/assets/images/task-icons/#{$type}_hover.png');
        }
      }
    }
  }
}
// Theme-dependent borders for the sidebar entries.
.dag-dark .draggable {
  border: 1px solid $borderDark;
}
.dag-light .draggable {
  border: 1px solid $borderLight;
}
// Hover highlight is theme-independent (applies under either theme class).
.dag .draggable {
  &:hover {
    color: $blue;
    border: 1px dashed $blue;
    background-color: $blueBg;
  }
}
// X6 minimap pinned to the bottom-right corner of the canvas.
.minimap {
  position: absolute;
  right: 0px;
  bottom: 0px;
  border: dashed 1px #e4e4e4;
  z-index: 9;
}
.toolbar-left-part {
  display: flex;
  align-items: center;
  flex: 1;
}
.toolbar-right-part {
  display: flex;
  align-items: center;
  .toolbar-right-item {
    margin-right: 10px;
  }
  // Node search box: collapsed (width 0) by default, slides open to 200px
  // when the `.visible` modifier is toggled.
  .node-selector {
    width: 0;
    overflow: hidden;
    transition: all 0.5s;
    margin-right: 0;
    &.visible {
      width: 200px;
      margin-right: 10px;
    }
  }
}
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,602 | [Bug] [k8s] K8s install failed due to external zookeeper config problem | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
Installing with Kubernetes failed when an external zookeeper was configured.
### What you expected to happen
Install successfully without errors.
### How to reproduce
```
cd apache-dolphinscheduler-2.0.3-src/docker/kubernetes/dolphinscheduler
# add external zookeeper config then run
helm install dolphinscheduler .
```
the error message is like:
```
Error: INSTALLATION FAILED: template: dolphinscheduler/templates/statefulset-dolphinscheduler-worker.yaml:72:16: executing "dolphinscheduler/templates/statefulset-dolphinscheduler-worker.yaml" at <include "dolphinscheduler.registry.env_vars" .>: error calling include: template: dolphinscheduler/templates/_helpers.tpl:173:19: executing "dolphinscheduler.registry.env_vars" at <.Values.externalRegistry.registryPluginName>: nil pointer evaluating interface {}.registryPluginName
```
### Anything else
I think this commit is incomplete, not changing the counterpart of values.yaml
https://github.com/apache/dolphinscheduler/commit/93f1d4df7d678f0a0d2f650b75ff739a87677109
### Version
2.0.3
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8602 | https://github.com/apache/dolphinscheduler/pull/8604 | 49b1a073dd075f011c931b9a938909c14a41de1f | a38adcb579c730fd92f9a3210379ce4d64ecddc7 | "2022-03-01T06:34:34Z" | java | "2022-03-12T12:51:52Z" | deploy/kubernetes/dolphinscheduler/values.yaml | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Default values for dolphinscheduler-chart.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.
timezone: "Asia/Shanghai"
image:
registry: "dolphinscheduler.docker.scarf.sh/apache"
tag: "2.0.4-SNAPSHOT"
pullPolicy: "IfNotPresent"
pullSecret: ""
## If not exists external database, by default, Dolphinscheduler's database will use it.
postgresql:
enabled: true
postgresqlUsername: "root"
postgresqlPassword: "root"
postgresqlDatabase: "dolphinscheduler"
persistence:
enabled: false
size: "20Gi"
storageClass: "-"
## If exists external database, and set postgresql.enable value to false.
## external database will be used, otherwise Dolphinscheduler's database will be used.
externalDatabase:
type: "postgresql"
host: "localhost"
port: "5432"
username: "root"
password: "root"
database: "dolphinscheduler"
params: "characterEncoding=utf8"
## If not exists external zookeeper, by default, Dolphinscheduler's zookeeper will use it.
zookeeper:
enabled: true
fourlwCommandsWhitelist: "srvr,ruok,wchs,cons"
persistence:
enabled: false
size: "20Gi"
storageClass: "-"
zookeeperRoot: "/dolphinscheduler"
## If an external zookeeper exists, set zookeeper.enabled to false.
## When zookeeper.enabled is false, the external zookeeper configured here is used.
externalZookeeper:
  zookeeperQuorum: "127.0.0.1:2181"
  zookeeperRoot: "/dolphinscheduler"

## Registry settings consumed by the chart templates
## (see "dolphinscheduler.registry.env_vars" in templates/_helpers.tpl).
## Without this section, rendering with zookeeper.enabled=false fails with
## 'nil pointer evaluating interface {}.registryPluginName' (issue #8602).
externalRegistry:
  ## Directory the registry plugin is loaded from
  registryPluginDir: "lib/plugin/registry"
  ## Registry plugin name, "zookeeper" by default
  registryPluginName: "zookeeper"
  ## Registry server addresses, e.g. "host1:2181,host2:2181"
  registryServers: "127.0.0.1:2181"
common:
## Configmap
configmap:
DOLPHINSCHEDULER_OPTS: ""
DATA_BASEDIR_PATH: "/tmp/dolphinscheduler"
RESOURCE_STORAGE_TYPE: "HDFS"
RESOURCE_UPLOAD_PATH: "/dolphinscheduler"
FS_DEFAULT_FS: "file:///"
FS_S3A_ENDPOINT: "s3.xxx.amazonaws.com"
FS_S3A_ACCESS_KEY: "xxxxxxx"
FS_S3A_SECRET_KEY: "xxxxxxx"
HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE: "false"
JAVA_SECURITY_KRB5_CONF_PATH: "/opt/krb5.conf"
LOGIN_USER_KEYTAB_USERNAME: "hdfs@HADOOP.COM"
LOGIN_USER_KEYTAB_PATH: "/opt/hdfs.keytab"
KERBEROS_EXPIRE_TIME: "2"
HDFS_ROOT_USER: "hdfs"
RESOURCE_MANAGER_HTTPADDRESS_PORT: "8088"
YARN_RESOURCEMANAGER_HA_RM_IDS: ""
YARN_APPLICATION_STATUS_ADDRESS: "http://ds1:%s/ws/v1/cluster/apps/%s"
YARN_JOB_HISTORY_STATUS_ADDRESS: "http://ds1:19888/ws/v1/history/mapreduce/jobs/%s"
DATASOURCE_ENCRYPTION_ENABLE: "false"
DATASOURCE_ENCRYPTION_SALT: "!@#$%^&*"
SUDO_ENABLE: "true"
# dolphinscheduler env
HADOOP_HOME: "/opt/soft/hadoop"
HADOOP_CONF_DIR: "/opt/soft/hadoop/etc/hadoop"
SPARK_HOME1: "/opt/soft/spark1"
SPARK_HOME2: "/opt/soft/spark2"
PYTHON_HOME: "/usr/bin/python"
JAVA_HOME: "/usr/local/openjdk-8"
HIVE_HOME: "/opt/soft/hive"
FLINK_HOME: "/opt/soft/flink"
DATAX_HOME: "/opt/soft/datax"
## Shared storage persistence mounted into api, master and worker, such as Hadoop, Spark, Flink and DataX binary package
sharedStoragePersistence:
enabled: false
mountPath: "/opt/soft"
accessModes:
- "ReadWriteMany"
## storageClassName must support the access mode: ReadWriteMany
storageClassName: "-"
storage: "20Gi"
## If RESOURCE_STORAGE_TYPE is HDFS and FS_DEFAULT_FS is file:///, fsFileResourcePersistence should be enabled for resource storage
fsFileResourcePersistence:
enabled: false
accessModes:
- "ReadWriteMany"
## storageClassName must support the access mode: ReadWriteMany
storageClassName: "-"
storage: "20Gi"
master:
## PodManagementPolicy controls how pods are created during initial scale up, when replacing pods on nodes, or when scaling down.
podManagementPolicy: "Parallel"
## Replicas is the desired number of replicas of the given Template.
replicas: "3"
## You can use annotations to attach arbitrary non-identifying metadata to objects.
## Clients such as tools and libraries can retrieve this metadata.
annotations: {}
## Affinity is a group of affinity scheduling rules. If specified, the pod's scheduling constraints.
## More info: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.17/#affinity-v1-core
affinity: {}
## NodeSelector is a selector which must be true for the pod to fit on a node.
## Selector which must match a node's labels for the pod to be scheduled on that node.
## More info: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/
nodeSelector: {}
## Tolerations are appended (excluding duplicates) to pods running with this RuntimeClass during admission,
## effectively unioning the set of nodes tolerated by the pod and the RuntimeClass.
tolerations: []
## Compute Resources required by this container. Cannot be updated.
## More info: https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container
resources: {}
# resources:
# limits:
# memory: "8Gi"
# cpu: "4"
# requests:
# memory: "2Gi"
# cpu: "500m"
## Periodic probe of container liveness. Container will be restarted if the probe fails. Cannot be updated.
## More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes
livenessProbe:
enabled: true
initialDelaySeconds: "30"
periodSeconds: "30"
timeoutSeconds: "5"
failureThreshold: "3"
successThreshold: "1"
## Periodic probe of container service readiness. Container will be removed from service endpoints if the probe fails. Cannot be updated.
## More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes
readinessProbe:
enabled: true
initialDelaySeconds: "30"
periodSeconds: "30"
timeoutSeconds: "5"
failureThreshold: "3"
successThreshold: "1"
## PersistentVolumeClaim represents a reference to a PersistentVolumeClaim in the same namespace.
## The StatefulSet controller is responsible for mapping network identities to claims in a way that maintains the identity of a pod.
## Every claim in this list must have at least one matching (by name) volumeMount in one container in the template.
## A claim in this list takes precedence over any volumes in the template, with the same name.
persistentVolumeClaim:
enabled: false
accessModes:
- "ReadWriteOnce"
storageClassName: "-"
storage: "20Gi"
env:
JAVA_OPTS: "-Xms1g -Xmx1g -Xmn512m"
MASTER_EXEC_THREADS: "100"
MASTER_EXEC_TASK_NUM: "20"
MASTER_DISPATCH_TASK_NUM: "3"
MASTER_HOST_SELECTOR: "LowerWeight"
MASTER_HEARTBEAT_INTERVAL: "10"
MASTER_TASK_COMMIT_RETRYTIMES: "5"
MASTER_TASK_COMMIT_INTERVAL: "1000"
MASTER_MAX_CPU_LOAD_AVG: "-1"
MASTER_RESERVED_MEMORY: "0.3"
MASTER_FAILOVER_INTERVAL: 10
MASTER_KILL_YARN_JOB_WHEN_HANDLE_FAILOVER: "true"
worker:
## PodManagementPolicy controls how pods are created during initial scale up, when replacing pods on nodes, or when scaling down.
podManagementPolicy: "Parallel"
## Replicas is the desired number of replicas of the given Template.
replicas: "3"
## You can use annotations to attach arbitrary non-identifying metadata to objects.
## Clients such as tools and libraries can retrieve this metadata.
annotations: {}
## Affinity is a group of affinity scheduling rules. If specified, the pod's scheduling constraints.
## More info: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.17/#affinity-v1-core
affinity: {}
## NodeSelector is a selector which must be true for the pod to fit on a node.
## Selector which must match a node's labels for the pod to be scheduled on that node.
## More info: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/
nodeSelector: {}
## Tolerations are appended (excluding duplicates) to pods running with this RuntimeClass during admission,
## effectively unioning the set of nodes tolerated by the pod and the RuntimeClass.
tolerations: []
## Compute Resources required by this container. Cannot be updated.
## More info: https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container
resources: {}
# resources:
# limits:
# memory: "8Gi"
# cpu: "4"
# requests:
# memory: "2Gi"
# cpu: "500m"
## Periodic probe of container liveness. Container will be restarted if the probe fails. Cannot be updated.
## More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes
livenessProbe:
enabled: true
initialDelaySeconds: "30"
periodSeconds: "30"
timeoutSeconds: "5"
failureThreshold: "3"
successThreshold: "1"
## Periodic probe of container service readiness. Container will be removed from service endpoints if the probe fails. Cannot be updated.
## More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes
readinessProbe:
enabled: true
initialDelaySeconds: "30"
periodSeconds: "30"
timeoutSeconds: "5"
failureThreshold: "3"
successThreshold: "1"
## PersistentVolumeClaim represents a reference to a PersistentVolumeClaim in the same namespace.
## The StatefulSet controller is responsible for mapping network identities to claims in a way that maintains the identity of a pod.
## Every claim in this list must have at least one matching (by name) volumeMount in one container in the template.
## A claim in this list takes precedence over any volumes in the template, with the same name.
persistentVolumeClaim:
enabled: false
## dolphinscheduler data volume
dataPersistentVolume:
enabled: false
accessModes:
- "ReadWriteOnce"
storageClassName: "-"
storage: "20Gi"
## dolphinscheduler logs volume
logsPersistentVolume:
enabled: false
accessModes:
- "ReadWriteOnce"
storageClassName: "-"
storage: "20Gi"
env:
WORKER_GROUPS_0: default
WORKER_MAX_CPU_LOAD_AVG: "-1"
WORKER_RESERVED_MEMORY: "0.3"
WORKER_EXEC_THREADS: "100"
WORKER_HEARTBEAT_INTERVAL: "10"
WORKER_HOST_WEIGHT: "100"
WORKER_GROUPS: "default"
alert:
## Number of desired pods. This is a pointer to distinguish between explicit zero and not specified. Defaults to 1.
replicas: 1
## The deployment strategy to use to replace existing pods with new ones.
strategy:
type: "RollingUpdate"
rollingUpdate:
maxSurge: "25%"
maxUnavailable: "25%"
## You can use annotations to attach arbitrary non-identifying metadata to objects.
## Clients such as tools and libraries can retrieve this metadata.
annotations: {}
## NodeSelector is a selector which must be true for the pod to fit on a node.
## Selector which must match a node's labels for the pod to be scheduled on that node.
## More info: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/
affinity: {}
## Compute Resources required by this container. Cannot be updated.
## More info: https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container
nodeSelector: {}
## Tolerations are appended (excluding duplicates) to pods running with this RuntimeClass during admission,
## effectively unioning the set of nodes tolerated by the pod and the RuntimeClass.
tolerations: []
## Affinity is a group of affinity scheduling rules. If specified, the pod's scheduling constraints.
## More info: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.17/#affinity-v1-core
resources: {}
# resources:
# limits:
# memory: "2Gi"
# cpu: "1"
# requests:
# memory: "1Gi"
# cpu: "500m"
## Periodic probe of container liveness. Container will be restarted if the probe fails. Cannot be updated.
## More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes
livenessProbe:
enabled: true
initialDelaySeconds: "30"
periodSeconds: "30"
timeoutSeconds: "5"
failureThreshold: "3"
successThreshold: "1"
## Periodic probe of container service readiness. Container will be removed from service endpoints if the probe fails. Cannot be updated.
## More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes
readinessProbe:
enabled: true
initialDelaySeconds: "30"
periodSeconds: "30"
timeoutSeconds: "5"
failureThreshold: "3"
successThreshold: "1"
## PersistentVolumeClaim represents a reference to a PersistentVolumeClaim in the same namespace.
## More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims
persistentVolumeClaim:
enabled: false
accessModes:
- "ReadWriteOnce"
storageClassName: "-"
storage: "20Gi"
env:
JAVA_OPTS: "-Xms512m -Xmx512m -Xmn256m"
api:
## Number of desired pods. This is a pointer to distinguish between explicit zero and not specified. Defaults to 1.
replicas: "1"
## The deployment strategy to use to replace existing pods with new ones.
strategy:
type: "RollingUpdate"
rollingUpdate:
maxSurge: "25%"
maxUnavailable: "25%"
## You can use annotations to attach arbitrary non-identifying metadata to objects.
## Clients such as tools and libraries can retrieve this metadata.
annotations: {}
## NodeSelector is a selector which must be true for the pod to fit on a node.
## Selector which must match a node's labels for the pod to be scheduled on that node.
## More info: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/
affinity: {}
## Compute Resources required by this container. Cannot be updated.
## More info: https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container
nodeSelector: {}
## Tolerations are appended (excluding duplicates) to pods running with this RuntimeClass during admission,
## effectively unioning the set of nodes tolerated by the pod and the RuntimeClass.
tolerations: []
## Affinity is a group of affinity scheduling rules. If specified, the pod's scheduling constraints.
## More info: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.17/#affinity-v1-core
resources: {}
# resources:
# limits:
# memory: "2Gi"
# cpu: "1"
# requests:
# memory: "1Gi"
# cpu: "500m"
## Periodic probe of container liveness. Container will be restarted if the probe fails. Cannot be updated.
## More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes
livenessProbe:
enabled: true
initialDelaySeconds: "30"
periodSeconds: "30"
timeoutSeconds: "5"
failureThreshold: "3"
successThreshold: "1"
## Periodic probe of container service readiness. Container will be removed from service endpoints if the probe fails. Cannot be updated.
## More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes
readinessProbe:
enabled: true
initialDelaySeconds: "30"
periodSeconds: "30"
timeoutSeconds: "5"
failureThreshold: "3"
successThreshold: "1"
## PersistentVolumeClaim represents a reference to a PersistentVolumeClaim in the same namespace.
## More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims
persistentVolumeClaim:
enabled: false
accessModes:
- "ReadWriteOnce"
storageClassName: "-"
storage: "20Gi"
service:
## type determines how the Service is exposed. Defaults to ClusterIP. Valid options are ExternalName, ClusterIP, NodePort, and LoadBalancer
type: "ClusterIP"
## clusterIP is the IP address of the service and is usually assigned randomly by the master
clusterIP: ""
## nodePort is the port on each node on which this service is exposed when type=NodePort
nodePort: ""
## externalIPs is a list of IP addresses for which nodes in the cluster will also accept traffic for this service
externalIPs: []
## externalName is the external reference that kubedns or equivalent will return as a CNAME record for this service, requires Type to be ExternalName
externalName: ""
## loadBalancerIP when service.type is LoadBalancer. LoadBalancer will get created with the IP specified in this field
loadBalancerIP: ""
## annotations may need to be set when service.type is LoadBalancer
## service.beta.kubernetes.io/aws-load-balancer-ssl-cert: arn:aws:acm:us-east-1:EXAMPLE_CERT
annotations: {}
env:
JAVA_OPTS: "-Xms512m -Xmx512m -Xmn256m"
ingress:
enabled: false
host: "dolphinscheduler.org"
path: "/dolphinscheduler"
tls:
enabled: false
secretName: "dolphinscheduler-tls"
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,844 | [Feature][E2E] Restore datasource center e2e test cases in ui-next | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Restore datasource center e2e test cases in ui-next
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8844 | https://github.com/apache/dolphinscheduler/pull/8845 | b0ffec5a4ca619f84aa44d1491c44d3fa0d8c7fe | 2335453482df236841bf2147596a89ef359f7036 | "2022-03-13T01:39:02Z" | java | "2022-03-13T04:33:19Z" | .github/workflows/e2e.yml | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
on:
pull_request:
push:
branches:
- dev
name: E2E
concurrency:
group: e2e-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
build:
name: E2E-Build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Sanity Check
uses: ./.github/actions/sanity-check
- name: Cache local Maven repository
uses: actions/cache@v2
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
restore-keys: ${{ runner.os }}-maven-
- name: Build Image
run: |
./mvnw -B clean install \
-Dmaven.test.skip \
-Dmaven.javadoc.skip \
-Dmaven.checkstyle.skip \
-Pdocker,release -Ddocker.tag=ci \
-pl dolphinscheduler-standalone-server -am
- name: Export Docker Images
run: |
docker save apache/dolphinscheduler-standalone-server:ci -o /tmp/standalone-image.tar \
&& du -sh /tmp/standalone-image.tar
- uses: actions/upload-artifact@v2
name: Upload Docker Images
with:
name: standalone-image
path: /tmp/standalone-image.tar
retention-days: 1
e2e:
name: ${{ matrix.case.name }}
needs: build
runs-on: ubuntu-latest
strategy:
matrix:
case:
- name: Tenant
class: org.apache.dolphinscheduler.e2e.cases.TenantE2ETest
- name: User
class: org.apache.dolphinscheduler.e2e.cases.UserE2ETest
- name: WorkerGroup
class: org.apache.dolphinscheduler.e2e.cases.WorkerGroupE2ETest
# - name: Project
# class: org.apache.dolphinscheduler.e2e.cases.ProjectE2ETest
- name: Queue
class: org.apache.dolphinscheduler.e2e.cases.QueueE2ETest
- name: Environment
class: org.apache.dolphinscheduler.e2e.cases.EnvironmentE2ETest
- name: Token
class: org.apache.dolphinscheduler.e2e.cases.TokenE2ETest
# - name: Workflow
# class: org.apache.dolphinscheduler.e2e.cases.WorkflowE2ETest
# - name: WorkflowForSwitch
# class: org.apache.dolphinscheduler.e2e.cases.WorkflowSwitchE2ETest
- name: FileManage
class: org.apache.dolphinscheduler.e2e.cases.FileManageE2ETest
- name: UdfManage
class: org.apache.dolphinscheduler.e2e.cases.UdfManageE2ETest
- name: FunctionManage
class: org.apache.dolphinscheduler.e2e.cases.FunctionManageE2ETest
# - name: MysqlDataSource
# class: org.apache.dolphinscheduler.e2e.cases.MysqlDataSourceE2ETest
# - name: ClickhouseDataSource
# class: org.apache.dolphinscheduler.e2e.cases.ClickhouseDataSourceE2ETest
# - name: PostgresDataSource
# class: org.apache.dolphinscheduler.e2e.cases.PostgresDataSourceE2ETest
# - name: SqlServerDataSource
# class: org.apache.dolphinscheduler.e2e.cases.SqlServerDataSourceE2ETest
# - name: HiveDataSource
# class: org.apache.dolphinscheduler.e2e.cases.HiveDataSourceE2ETest
env:
RECORDING_PATH: /tmp/recording-${{ matrix.case.name }}
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Cache local Maven repository
uses: actions/cache@v2
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
restore-keys: ${{ runner.os }}-maven-
- uses: actions/download-artifact@v2
name: Download Docker Images
with:
name: standalone-image
path: /tmp
- name: Load Docker Images
run: |
docker load -i /tmp/standalone-image.tar
- name: Run Test
run: |
./mvnw -B -f dolphinscheduler-e2e/pom.xml -am \
-DfailIfNoTests=false \
-Dtest=${{ matrix.case.class }} test
- uses: actions/upload-artifact@v2
if: always()
name: Upload Recording
with:
name: recording-${{ matrix.case.name }}
path: ${{ env.RECORDING_PATH }}
retention-days: 1
result:
name: E2E
runs-on: ubuntu-latest
needs: [ e2e ]
steps:
- name: Status
run: echo "Passed!"
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,844 | [Feature][E2E] Restore datasource center e2e test cases in ui-next | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Restore datasource center e2e test cases in ui-next
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8844 | https://github.com/apache/dolphinscheduler/pull/8845 | b0ffec5a4ca619f84aa44d1491c44d3fa0d8c7fe | 2335453482df236841bf2147596a89ef359f7036 | "2022-03-13T01:39:02Z" | java | "2022-03-13T04:33:19Z" | dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ClickhouseDataSourceE2ETest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.dolphinscheduler.e2e.cases;
import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;
import org.apache.dolphinscheduler.e2e.core.DolphinScheduler;
import org.apache.dolphinscheduler.e2e.pages.LoginPage;
import org.apache.dolphinscheduler.e2e.pages.datasource.DataSourcePage;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
/**
 * E2E test that creates and then deletes a ClickHouse data source through the
 * data source page of the web UI.
 */
@DolphinScheduler(composeFiles = "docker/datasource-clickhouse/docker-compose.yaml")
public class ClickhouseDataSourceE2ETest {
    // Never assigned in this class; presumably injected by the
    // @DolphinScheduler test harness -- TODO confirm.
    private static RemoteWebDriver browser;

    // Credentials of the default administrator account used to log in.
    private static final String user = "admin";
    private static final String password = "dolphinscheduler123";

    // Properties of the ClickHouse instance started by the compose file.
    private static final String dataSourceType = "CLICKHOUSE";
    private static final String dataSourceName = "clickhouse_test";
    private static final String dataSourceDescription = "clickhouse_test";
    private static final String ip = "clickhouse";
    private static final String port = "8123";
    private static final String userName = "ch_test";
    private static final String chPassword = "ch_test";
    private static final String database = "ch_test";
    private static final String jdbcParams = "";

    /** Log in and navigate to the data source page before any test runs. */
    @BeforeAll
    public static void setup() {
        new LoginPage(browser)
                .login(user, password)
                .goToNav(DataSourcePage.class);
    }

    /**
     * Creates the data source and asserts that, once the creation dialog has
     * closed, the new entry shows up in the data source list.
     */
    @Test
    @Order(10)
    void testCreateClickhouseDataSource() {
        final DataSourcePage page = new DataSourcePage(browser);

        page.createDataSource(dataSourceType, dataSourceName, dataSourceDescription, ip, port, userName, chPassword, database, jdbcParams);

        new WebDriverWait(page.driver(), 10).until(ExpectedConditions.invisibilityOfElementLocated(new By.ById("dialogCreateDataSource")));

        await().untilAsserted(() -> assertThat(page.dataSourceItemsList())
                .as("DataSource list should contain newly-created database")
                .extracting(WebElement::getText)
                .anyMatch(it -> it.contains(dataSourceName)));
    }

    /**
     * Deletes the previously created data source and asserts that it is no
     * longer present in the list after a page refresh.
     */
    @Test
    @Order(20)
    void testDeleteClickhouseDataSource() {
        final DataSourcePage page = new DataSourcePage(browser);

        page.delete(dataSourceName);

        await().untilAsserted(() -> {
            browser.navigate().refresh();

            assertThat(
                    page.dataSourceItemsList()
            ).noneMatch(
                    it -> it.getText().contains(dataSourceName)
            );
        });
    }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,844 | [Feature][E2E] Restore datasource center e2e test cases in ui-next | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Restore datasource center e2e test cases in ui-next
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8844 | https://github.com/apache/dolphinscheduler/pull/8845 | b0ffec5a4ca619f84aa44d1491c44d3fa0d8c7fe | 2335453482df236841bf2147596a89ef359f7036 | "2022-03-13T01:39:02Z" | java | "2022-03-13T04:33:19Z" | dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/HiveDataSourceE2ETest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.dolphinscheduler.e2e.cases;
import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;
import org.apache.dolphinscheduler.e2e.core.DolphinScheduler;
import org.apache.dolphinscheduler.e2e.pages.LoginPage;
import org.apache.dolphinscheduler.e2e.pages.datasource.DataSourcePage;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
/**
 * E2E test that creates and then deletes a Hive data source through the data
 * source page of the web UI.
 */
@DolphinScheduler(composeFiles = "docker/datasource-hive/docker-compose.yaml")
public class HiveDataSourceE2ETest {
    // Never assigned in this class; presumably injected by the
    // @DolphinScheduler test harness -- TODO confirm.
    private static RemoteWebDriver browser;

    // Credentials of the default administrator account used to log in.
    private static final String user = "admin";
    private static final String password = "dolphinscheduler123";

    // Properties of the Hive server started by the compose file; the test
    // server is contacted with an empty password.
    private static final String dataSourceType = "HIVE";
    private static final String dataSourceName = "hive_test";
    private static final String dataSourceDescription = "hive_test";
    private static final String ip = "hive-server";
    private static final String port = "10000";
    private static final String userName = "hadoop";
    private static final String hivePassword = "";
    private static final String database = "default";
    private static final String jdbcParams = "";

    /** Log in and navigate to the data source page before any test runs. */
    @BeforeAll
    public static void setup() {
        new LoginPage(browser)
                .login(user, password)
                .goToNav(DataSourcePage.class);
    }

    /**
     * Creates the data source and asserts that, once the creation dialog has
     * closed, the new entry shows up in the data source list.
     */
    @Test
    @Order(10)
    void testCreateHiveDataSource() {
        final DataSourcePage page = new DataSourcePage(browser);

        page.createDataSource(dataSourceType, dataSourceName, dataSourceDescription, ip, port, userName, hivePassword, database, jdbcParams);

        new WebDriverWait(page.driver(), 10).until(ExpectedConditions.invisibilityOfElementLocated(new By.ById("dialogCreateDataSource")));

        await().untilAsserted(() -> assertThat(page.dataSourceItemsList())
                .as("DataSource list should contain newly-created database")
                .extracting(WebElement::getText)
                .anyMatch(it -> it.contains(dataSourceName)));
    }

    /**
     * Deletes the previously created data source and asserts that it is no
     * longer present in the list after a page refresh.
     */
    @Test
    @Order(20)
    void testDeleteHiveDataSource() {
        final DataSourcePage page = new DataSourcePage(browser);

        page.delete(dataSourceName);

        await().untilAsserted(() -> {
            browser.navigate().refresh();

            assertThat(
                    page.dataSourceItemsList()
            ).noneMatch(
                    it -> it.getText().contains(dataSourceName)
            );
        });
    }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,844 | [Feature][E2E] Restore datasource center e2e test cases in ui-next | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Restore datasource center e2e test cases in ui-next
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8844 | https://github.com/apache/dolphinscheduler/pull/8845 | b0ffec5a4ca619f84aa44d1491c44d3fa0d8c7fe | 2335453482df236841bf2147596a89ef359f7036 | "2022-03-13T01:39:02Z" | java | "2022-03-13T04:33:19Z" | dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/MysqlDataSourceE2ETest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.dolphinscheduler.e2e.cases;
import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;
import org.apache.dolphinscheduler.e2e.core.DolphinScheduler;
import org.apache.dolphinscheduler.e2e.pages.LoginPage;
import org.apache.dolphinscheduler.e2e.pages.datasource.DataSourcePage;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
/**
 * E2E test that creates and then deletes a MySQL data source through the data
 * source page of the web UI.
 */
@DolphinScheduler(composeFiles = "docker/datasource-mysql/docker-compose.yaml")
public class MysqlDataSourceE2ETest {
    // Never assigned in this class; presumably injected by the
    // @DolphinScheduler test harness -- TODO confirm.
    private static RemoteWebDriver browser;

    // Credentials of the default administrator account used to log in.
    private static final String user = "admin";
    private static final String password = "dolphinscheduler123";

    // Properties of the MySQL instance started by the compose file.
    private static final String dataSourceType = "MYSQL";
    private static final String dataSourceName = "mysql_test";
    private static final String dataSourceDescription = "mysql_test";
    private static final String ip = "mysql";
    private static final String port = "3306";
    private static final String userName = "root";
    private static final String mysqlPassword = "123456";
    private static final String database = "mysql";
    private static final String jdbcParams = "{\"useSSL\": false}";

    /** Log in and navigate to the data source page before any test runs. */
    @BeforeAll
    public static void setup() {
        new LoginPage(browser)
                .login(user, password)
                .goToNav(DataSourcePage.class);
    }

    /**
     * Creates the data source and asserts that, once the creation dialog has
     * closed, the new entry shows up in the data source list.
     */
    @Test
    @Order(10)
    void testCreateMysqlDataSource() {
        final DataSourcePage page = new DataSourcePage(browser);

        page.createDataSource(dataSourceType, dataSourceName, dataSourceDescription, ip, port, userName, mysqlPassword, database, jdbcParams);

        new WebDriverWait(page.driver(), 10).until(ExpectedConditions.invisibilityOfElementLocated(new By.ById("dialogCreateDataSource")));

        await().untilAsserted(() -> assertThat(page.dataSourceItemsList())
                .as("DataSource list should contain newly-created database")
                .extracting(WebElement::getText)
                .anyMatch(it -> it.contains(dataSourceName)));
    }

    /**
     * Deletes the previously created data source and asserts that it is no
     * longer present in the list after a page refresh.
     */
    @Test
    @Order(20)
    void testDeleteMysqlDataSource() {
        final DataSourcePage page = new DataSourcePage(browser);

        page.delete(dataSourceName);

        await().untilAsserted(() -> {
            browser.navigate().refresh();

            assertThat(
                    page.dataSourceItemsList()
            ).noneMatch(
                    it -> it.getText().contains(dataSourceName)
            );
        });
    }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,844 | [Feature][E2E] Restore datasource center e2e test cases in ui-next | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Restore datasource center e2e test cases in ui-next
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8844 | https://github.com/apache/dolphinscheduler/pull/8845 | b0ffec5a4ca619f84aa44d1491c44d3fa0d8c7fe | 2335453482df236841bf2147596a89ef359f7036 | "2022-03-13T01:39:02Z" | java | "2022-03-13T04:33:19Z" | dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/PostgresDataSourceE2ETest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.dolphinscheduler.e2e.cases;
import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;
import org.apache.dolphinscheduler.e2e.core.DolphinScheduler;
import org.apache.dolphinscheduler.e2e.pages.LoginPage;
import org.apache.dolphinscheduler.e2e.pages.datasource.DataSourcePage;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
/**
 * E2E test that creates and then deletes a PostgreSQL data source through the
 * data source page of the web UI.
 */
@DolphinScheduler(composeFiles = "docker/datasource-postgresql/docker-compose.yaml")
public class PostgresDataSourceE2ETest {
    // Never assigned in this class; presumably injected by the
    // @DolphinScheduler test harness -- TODO confirm.
    private static RemoteWebDriver browser;

    // Credentials of the default administrator account used to log in.
    private static final String user = "admin";
    private static final String password = "dolphinscheduler123";

    // Properties of the PostgreSQL instance started by the compose file.
    private static final String dataSourceType = "POSTGRESQL";
    private static final String dataSourceName = "postgres_test";
    private static final String dataSourceDescription = "postgres_test";
    private static final String ip = "postgres";
    private static final String port = "5432";
    private static final String userName = "postgres";
    private static final String pgPassword = "postgres";
    private static final String database = "postgres";
    private static final String jdbcParams = "";

    /** Log in and navigate to the data source page before any test runs. */
    @BeforeAll
    public static void setup() {
        new LoginPage(browser)
                .login(user, password)
                .goToNav(DataSourcePage.class);
    }

    /**
     * Creates the data source and asserts that, once the creation dialog has
     * closed, the new entry shows up in the data source list.
     */
    @Test
    @Order(10)
    void testCreatePostgresDataSource() {
        final DataSourcePage page = new DataSourcePage(browser);

        page.createDataSource(dataSourceType, dataSourceName, dataSourceDescription, ip, port, userName, pgPassword, database, jdbcParams);

        new WebDriverWait(page.driver(), 10).until(ExpectedConditions.invisibilityOfElementLocated(new By.ById("dialogCreateDataSource")));

        await().untilAsserted(() -> assertThat(page.dataSourceItemsList())
                .as("DataSource list should contain newly-created database")
                .extracting(WebElement::getText)
                .anyMatch(it -> it.contains(dataSourceName)));
    }

    /**
     * Deletes the previously created data source and asserts that it is no
     * longer present in the list after a page refresh.
     */
    @Test
    @Order(20)
    void testDeletePostgresDataSource() {
        final DataSourcePage page = new DataSourcePage(browser);

        page.delete(dataSourceName);

        await().untilAsserted(() -> {
            browser.navigate().refresh();

            assertThat(
                    page.dataSourceItemsList()
            ).noneMatch(
                    it -> it.getText().contains(dataSourceName)
            );
        });
    }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,844 | [Feature][E2E] Restore datasource center e2e test cases in ui-next | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Restore datasource center e2e test cases in ui-next
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8844 | https://github.com/apache/dolphinscheduler/pull/8845 | b0ffec5a4ca619f84aa44d1491c44d3fa0d8c7fe | 2335453482df236841bf2147596a89ef359f7036 | "2022-03-13T01:39:02Z" | java | "2022-03-13T04:33:19Z" | dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/SqlServerDataSourceE2ETest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.dolphinscheduler.e2e.cases;
import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;
import org.apache.dolphinscheduler.e2e.core.DolphinScheduler;
import org.apache.dolphinscheduler.e2e.pages.LoginPage;
import org.apache.dolphinscheduler.e2e.pages.datasource.DataSourcePage;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
/**
 * E2E test that creates and then deletes a SQL Server data source through the
 * data source page of the web UI.
 */
@DolphinScheduler(composeFiles = "docker/datasource-sqlserver/docker-compose.yaml")
public class SqlServerDataSourceE2ETest {
    // Never assigned in this class; presumably injected by the
    // @DolphinScheduler test harness -- TODO confirm.
    private static RemoteWebDriver browser;

    // Credentials of the default administrator account used to log in.
    private static final String user = "admin";
    private static final String password = "dolphinscheduler123";

    // Properties of the SQL Server instance started by the compose file.
    private static final String dataSourceType = "SQLSERVER";
    private static final String dataSourceName = "sqlserver_test";
    private static final String dataSourceDescription = "sqlserver_test";
    private static final String ip = "sqlserver";
    private static final String port = "1433";
    private static final String userName = "sa";
    private static final String sqlServerPassword = "OcP2020123";
    private static final String database = "master";
    private static final String jdbcParams = "";

    /** Log in and navigate to the data source page before any test runs. */
    @BeforeAll
    public static void setup() {
        new LoginPage(browser)
                .login(user, password)
                .goToNav(DataSourcePage.class);
    }

    /**
     * Creates the data source and asserts that, once the creation dialog has
     * closed, the new entry shows up in the data source list.
     */
    @Test
    @Order(10)
    void testCreateSqlServerDataSource() {
        final DataSourcePage page = new DataSourcePage(browser);

        page.createDataSource(dataSourceType, dataSourceName, dataSourceDescription, ip, port, userName, sqlServerPassword, database, jdbcParams);

        new WebDriverWait(page.driver(), 10).until(ExpectedConditions.invisibilityOfElementLocated(new By.ById("dialogCreateDataSource")));

        await().untilAsserted(() -> assertThat(page.dataSourceItemsList())
                .as("DataSource list should contain newly-created database")
                .extracting(WebElement::getText)
                .anyMatch(it -> it.contains(dataSourceName)));
    }

    /**
     * Deletes the previously created data source and asserts that it is no
     * longer present in the list after a page refresh.
     */
    @Test
    @Order(20)
    void testDeleteSqlServerDataSource() {
        final DataSourcePage page = new DataSourcePage(browser);

        page.delete(dataSourceName);

        await().untilAsserted(() -> {
            browser.navigate().refresh();

            assertThat(
                    page.dataSourceItemsList()
            ).noneMatch(
                    it -> it.getText().contains(dataSourceName)
            );
        });
    }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,844 | [Feature][E2E] Restore datasource center e2e test cases in ui-next | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Restore datasource center e2e test cases in ui-next
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8844 | https://github.com/apache/dolphinscheduler/pull/8845 | b0ffec5a4ca619f84aa44d1491c44d3fa0d8c7fe | 2335453482df236841bf2147596a89ef359f7036 | "2022-03-13T01:39:02Z" | java | "2022-03-13T04:33:19Z" | dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/datasource/DataSourcePage.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.dolphinscheduler.e2e.pages.datasource;
import lombok.Getter;
import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage;
import java.util.List;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.openqa.selenium.support.FindBy;
import org.openqa.selenium.support.FindBys;
import org.openqa.selenium.support.PageFactory;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.Select;
import org.openqa.selenium.support.ui.WebDriverWait;
/**
 * Page object for the data source management page: wraps creation and
 * deletion of data sources through the web UI.
 */
@Getter
public class DataSourcePage extends NavBarPage implements NavBarPage.NavBarItem {
    @FindBy(id = "btnCreateDataSource")
    private WebElement buttonCreateDataSource;

    @FindBy(className = "data-source-items")
    private List<WebElement> dataSourceItemsList;

    // Confirm button(s) of the delete pop-confirm dialogs.
    @FindBys({
            @FindBy(className = "el-popconfirm"),
            @FindBy(className = "el-button--primary"),
    })
    private List<WebElement> buttonConfirm;

    private final CreateDataSourceForm createDataSourceForm;

    public DataSourcePage(RemoteWebDriver driver) {
        super(driver);

        createDataSourceForm = new CreateDataSourceForm();
    }

    /**
     * Opens the "create data source" dialog, fills in every field and submits
     * the form.
     *
     * @param jdbcParams optional JDBC parameter string; skipped when empty
     * @return this page, for chaining
     */
    public DataSourcePage createDataSource(String dataSourceType, String dataSourceName, String dataSourceDescription, String ip, String port, String userName, String password, String database,
                                           String jdbcParams) {
        buttonCreateDataSource().click();

        // Wait until the dialog is actually visible before touching any of its
        // widgets. Previously the type dropdown was clicked before this wait,
        // which raced against the dialog being rendered.
        new WebDriverWait(driver, 10).until(ExpectedConditions.visibilityOfElementLocated(new By.ById("dialogCreateDataSource")));

        createDataSourceForm().btnDataSourceTypeDropdown().click();

        createDataSourceForm().selectDataSourceType()
                .stream()
                .filter(it -> it.getText().contains(dataSourceType.toUpperCase()))
                .findFirst()
                .orElseThrow(() -> new RuntimeException(String.format("No %s in data source type list", dataSourceType.toUpperCase())))
                .click();

        createDataSourceForm().inputDataSourceName().sendKeys(dataSourceName);
        createDataSourceForm().inputDataSourceDescription().sendKeys(dataSourceDescription);
        createDataSourceForm().inputIP().sendKeys(ip);
        // Clear the port field first so sendKeys does not append to existing content.
        createDataSourceForm().inputPort().clear();
        createDataSourceForm().inputPort().sendKeys(port);
        createDataSourceForm().inputUserName().sendKeys(userName);
        createDataSourceForm().inputPassword().sendKeys(password);
        createDataSourceForm().inputDataBase().sendKeys(database);

        if (!"".equals(jdbcParams)) {
            createDataSourceForm().inputJdbcParams().sendKeys(jdbcParams);
        }

        createDataSourceForm().buttonSubmit().click();

        return this;
    }

    /**
     * Deletes the data source whose row text contains the given name and
     * confirms the pop-up.
     *
     * @param name data source name to delete
     * @return this page, for chaining
     */
    public DataSourcePage delete(String name) {
        dataSourceItemsList()
                .stream()
                .filter(it -> it.getText().contains(name))
                .flatMap(it -> it.findElements(By.id("btnDelete")).stream())
                .filter(WebElement::isDisplayed)
                .findFirst()
                .orElseThrow(() -> new RuntimeException("No delete button in data source list"))
                .click();

        buttonConfirm()
                .stream()
                .filter(WebElement::isDisplayed)
                .findFirst()
                .orElseThrow(() -> new RuntimeException("No confirm button when deleting"))
                .click();

        return this;
    }

    /** Locators for the fields of the "create data source" dialog. */
    @Getter
    public class CreateDataSourceForm {
        CreateDataSourceForm() {
            PageFactory.initElements(driver, this);
        }

        @FindBy(className = "options-datasource-type")
        private List<WebElement> selectDataSourceType;

        @FindBy(id = "btnDataSourceTypeDropDown")
        private WebElement btnDataSourceTypeDropdown;

        @FindBy(id = "inputDataSourceName")
        private WebElement inputDataSourceName;

        @FindBy(id = "inputDataSourceDescription")
        private WebElement inputDataSourceDescription;

        @FindBy(id = "inputIP")
        private WebElement inputIP;

        @FindBy(id = "inputPort")
        private WebElement inputPort;

        @FindBy(id = "inputUserName")
        private WebElement inputUserName;

        @FindBy(id = "inputPassword")
        private WebElement inputPassword;

        @FindBy(id = "inputDataBase")
        private WebElement inputDataBase;

        @FindBy(id = "inputJdbcParams")
        private WebElement inputJdbcParams;

        @FindBy(id = "btnSubmit")
        private WebElement buttonSubmit;

        @FindBy(id = "btnCancel")
        private WebElement buttonCancel;

        @FindBy(id = "btnTestConnection")
        private WebElement btnTestConnection;
    }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,758 | [Improvement] [dolphinscheduler-worker] the object 'preTaskCache' is never used, Suggested deletion | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
The worker receives the task execute request command and caches the 'TaskExecutionContext', and also creates a new 'preTaskCache' object, but that object is never used. Does it serve any other purpose?
![image](https://user-images.githubusercontent.com/29919212/157243873-4405aa6f-9132-4f07-acaa-375f16e08aa5.png)
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8758 | https://github.com/apache/dolphinscheduler/pull/8795 | 6514ed2da27ec13775183ec9d7fac12e9132fe16 | 6a5b3bb8f92e41f2cab1ad688430b334bb0873f9 | "2022-03-08T13:10:25Z" | java | "2022-03-14T06:14:21Z" | dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteProcessor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.processor;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.Event;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.FileUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.NetUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand;
import org.apache.dolphinscheduler.remote.command.TaskExecuteRequestCommand;
import org.apache.dolphinscheduler.remote.processor.NettyRemoteChannel;
import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor;
import org.apache.dolphinscheduler.server.utils.LogUtils;
import org.apache.dolphinscheduler.server.worker.cache.ResponseCache;
import org.apache.dolphinscheduler.server.worker.config.WorkerConfig;
import org.apache.dolphinscheduler.server.worker.plugin.TaskPluginManager;
import org.apache.dolphinscheduler.server.worker.runner.TaskExecuteThread;
import org.apache.dolphinscheduler.server.worker.runner.WorkerManagerThread;
import org.apache.dolphinscheduler.service.alert.AlertClientService;
import org.apache.dolphinscheduler.service.queue.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.spi.task.TaskExecutionContextCacheManager;
import org.apache.dolphinscheduler.spi.task.request.TaskRequest;
import java.util.Date;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.google.common.base.Preconditions;
import io.netty.channel.Channel;
/**
 * Netty request processor on the worker side that handles TASK_EXECUTE_REQUEST
 * commands sent by the master.
 */
@Component
public class TaskExecuteProcessor implements NettyRequestProcessor {

    private static final Logger logger = LoggerFactory.getLogger(TaskExecuteProcessor.class);

    /**
     * worker config
     */
    @Autowired
    private WorkerConfig workerConfig;

    /**
     * task callback service, used to send ack/response commands back to the master
     */
    @Autowired
    private TaskCallbackService taskCallbackService;

    /**
     * alert client service
     */
    @Autowired
    private AlertClientService alertClientService;

    /**
     * task plugin manager
     */
    @Autowired
    private TaskPluginManager taskPluginManager;

    /**
     * task execute manager, queues accepted tasks for execution
     */
    @Autowired
    private WorkerManagerThread workerManager;
/**
 * Pre-cache the task execution context so that a kill request arriving before
 * the task actually starts running can still find the task in the cache.
 *
 * <p>The previously constructed {@code preTaskCache} object was never used and
 * has been removed.
 *
 * @param taskExecutionContext task execution context received from the master
 */
private void setTaskCache(TaskExecutionContext taskExecutionContext) {
    TaskRequest taskRequest = JSONUtils.parseObject(JSONUtils.toJsonString(taskExecutionContext), TaskRequest.class);
    TaskExecutionContextCacheManager.cacheTaskExecutionContext(taskRequest);
}
/**
 * Handles a TASK_EXECUTE_REQUEST command: parses and caches the task
 * execution context, prepares the local working directory (skipped for dry
 * runs), registers the reply channel, acks the master and hands the task to
 * the worker manager for execution.
 */
@Override
public void process(Channel channel, Command command) {
    // Only TASK_EXECUTE_REQUEST commands are routed to this processor.
    Preconditions.checkArgument(CommandType.TASK_EXECUTE_REQUEST == command.getType(),
            String.format("invalid command type : %s", command.getType()));

    TaskExecuteRequestCommand taskRequestCommand = JSONUtils.parseObject(
            command.getBody(), TaskExecuteRequestCommand.class);

    // Malformed payloads are logged and dropped rather than thrown.
    if (taskRequestCommand == null) {
        logger.error("task execute request command is null");
        return;
    }
    logger.info("task execute request command : {}", taskRequestCommand);

    String contextJson = taskRequestCommand.getTaskExecutionContext();
    TaskExecutionContext taskExecutionContext = JSONUtils.parseObject(contextJson, TaskExecutionContext.class);

    if (taskExecutionContext == null) {
        logger.error("task execution context is null");
        return;
    }

    // Cache the context up-front so a kill request can find this task even
    // before it starts executing.
    setTaskCache(taskExecutionContext);
    // todo custom logger

    taskExecutionContext.setHost(NetUtils.getAddr(workerConfig.getListenPort()));
    taskExecutionContext.setLogPath(LogUtils.getTaskLogPath(taskExecutionContext));

    // Dry-run tasks skip working-directory and OS-user preparation.
    if (Constants.DRY_RUN_FLAG_NO == taskExecutionContext.getDryRun()) {
        // local execute path
        String execLocalPath = getExecLocalPath(taskExecutionContext);
        logger.info("task instance local execute path : {}", execLocalPath);
        taskExecutionContext.setExecutePath(execLocalPath);

        try {
            FileUtils.createWorkDirIfAbsent(execLocalPath);
            if (CommonUtils.isSudoEnable() && workerConfig.isTenantAutoCreate()) {
                OSUtils.createUserIfAbsent(taskExecutionContext.getTenantCode());
            }
        } catch (Throwable ex) {
            // The task cannot run without its working directory; drop it
            // from the cache again.
            logger.error("create execLocalPath: {}", execLocalPath, ex);
            TaskExecutionContextCacheManager.removeByTaskInstanceId(taskExecutionContext.getTaskInstanceId());
        }
    }

    // Remember the channel/opaque so later ack and response messages can be
    // sent back to the master that issued this request.
    taskCallbackService.addRemoteChannel(taskExecutionContext.getTaskInstanceId(),
            new NettyRemoteChannel(channel, command.getOpaque()));

    // delay task process: if the configured delay has not elapsed yet, mark
    // the task as DELAY_EXECUTION and leave the start time unset.
    long remainTime = DateUtils.getRemainTime(taskExecutionContext.getFirstSubmitTime(), taskExecutionContext.getDelayTime() * 60L);
    if (remainTime > 0) {
        logger.info("delay the execution of task instance {}, delay time: {} s", taskExecutionContext.getTaskInstanceId(), remainTime);
        taskExecutionContext.setCurrentExecutionStatus(ExecutionStatus.DELAY_EXECUTION);
        taskExecutionContext.setStartTime(null);
    } else {
        taskExecutionContext.setCurrentExecutionStatus(ExecutionStatus.RUNNING_EXECUTION);
        taskExecutionContext.setStartTime(new Date());
    }

    this.doAck(taskExecutionContext);

    // submit task to manager
    if (!workerManager.offer(new TaskExecuteThread(taskExecutionContext, taskCallbackService, alertClientService, taskPluginManager))) {
        logger.info("submit task to manager error, queue is full, queue size is {}", workerManager.getDelayQueueSize());
    }
}
private void doAck(TaskExecutionContext taskExecutionContext) {
// tell master that task is in executing
TaskExecuteAckCommand ackCommand = buildAckCommand(taskExecutionContext);
ResponseCache.get().cache(taskExecutionContext.getTaskInstanceId(), ackCommand.convert2Command(), Event.ACK);
taskCallbackService.sendAck(taskExecutionContext.getTaskInstanceId(), ackCommand.convert2Command());
}
/**
* build ack command
*
* @param taskExecutionContext taskExecutionContext
* @return TaskExecuteAckCommand
*/
private TaskExecuteAckCommand buildAckCommand(TaskExecutionContext taskExecutionContext) {
TaskExecuteAckCommand ackCommand = new TaskExecuteAckCommand();
ackCommand.setTaskInstanceId(taskExecutionContext.getTaskInstanceId());
ackCommand.setStatus(taskExecutionContext.getCurrentExecutionStatus().getCode());
ackCommand.setLogPath(LogUtils.getTaskLogPath(taskExecutionContext));
ackCommand.setHost(taskExecutionContext.getHost());
ackCommand.setStartTime(taskExecutionContext.getStartTime());
if (TaskType.SQL.getDesc().equalsIgnoreCase(taskExecutionContext.getTaskType()) || TaskType.PROCEDURE.getDesc().equalsIgnoreCase(taskExecutionContext.getTaskType())) {
ackCommand.setExecutePath(null);
} else {
ackCommand.setExecutePath(taskExecutionContext.getExecutePath());
}
taskExecutionContext.setLogPath(ackCommand.getLogPath());
ackCommand.setProcessInstanceId(taskExecutionContext.getProcessInstanceId());
return ackCommand;
}
/**
* get execute local path
*
* @param taskExecutionContext taskExecutionContext
* @return execute local path
*/
private String getExecLocalPath(TaskExecutionContext taskExecutionContext) {
return FileUtils.getProcessExecDir(taskExecutionContext.getProjectCode(),
taskExecutionContext.getProcessDefineCode(),
taskExecutionContext.getProcessDefineVersion(),
taskExecutionContext.getProcessInstanceId(),
taskExecutionContext.getTaskInstanceId());
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,802 | [Bug-FE] [UI Next][V1.0.0-Alpha] The Workflow Relation page cannot be zoomed in or out using a scroll wheel | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
The Workflow Relation page cannot be zoomed in or out using a scroll wheel
<img width="1918" alt="image" src="https://user-images.githubusercontent.com/76080484/157601202-a42caa98-ef3b-4180-8ea4-6cb644dcbe3b.png">
### What you expected to happen
The Workflow Relation page is zoomed in or out by scrolling on a scroll wheel
### How to reproduce
The Workflow Relation page cannot be zoomed in or out using a scroll wheel
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8802 | https://github.com/apache/dolphinscheduler/pull/8868 | 1f831405588626cec0f6d27b97695895d0ee7108 | a7dd073e8304d1bcf3436a0606406690007f1b16 | "2022-03-10T06:22:02Z" | java | "2022-03-14T09:07:52Z" | dolphinscheduler-ui-next/src/views/projects/workflow/relation/components/Graph.tsx | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { defineComponent, PropType, ref } from 'vue'
import initChart from '@/components/chart'
import { useI18n } from 'vue-i18n'
import type { Ref } from 'vue'
import { format } from 'date-fns'
const props = {
height: {
type: [String, Number] as PropType<string | number>,
default: window.innerHeight - 174
},
width: {
type: [String, Number] as PropType<string | number>,
default: '100%'
},
seriesData: {
type: Array as PropType<Array<any>>,
default: () => []
},
labelShow: {
type: Boolean as PropType<boolean>,
default: true
}
}
const GraphChart = defineComponent({
name: 'GraphChart',
props,
setup(props) {
const graphChartRef: Ref<HTMLDivElement | null> = ref(null)
const { t } = useI18n()
const legendData = [
{ name: t('project.workflow.online') },
{ name: t('project.workflow.workflow_offline') },
{ name: t('project.workflow.schedule_offline') }
]
const getCategory = (schedulerStatus: number, workflowStatus: number) => {
switch (true) {
case workflowStatus === 0:
return 1
case workflowStatus === 1 && schedulerStatus === 0:
return 2
case workflowStatus === 1 && schedulerStatus === 1:
default:
return 0
}
}
const option: any = {
tooltip: {
confine: true,
backgroundColor: '#fff',
formatter: (params: any) => {
if (!params.data.name) {
return false
}
const {
name,
scheduleStartTime,
scheduleEndTime,
crontab,
workFlowPublishStatus,
schedulePublishStatus
} = params.data
return `
${t('project.workflow.workflow_name')}:${name}<br/>
${t(
'project.workflow.schedule_start_time'
)}:${scheduleStartTime}<br/>
${t('project.workflow.schedule_end_time')}:${scheduleEndTime}<br/>
${t('project.workflow.crontab_expression')}:${
crontab ? crontab : ' - '
}<br/>
${t(
'project.workflow.workflow_publish_status'
)}:${workFlowPublishStatus}<br/>
${t(
'project.workflow.schedule_publish_status'
)}:${schedulePublishStatus}<br/>
`
}
},
legend: [
{
data: legendData?.map((item) => item.name)
}
],
series: [
{
type: 'graph',
layout: 'force',
draggable: true,
force: {
repulsion: 300,
edgeLength: 100
},
symbol: 'roundRect',
symbolSize: 70,
roam: false,
label: {
show: props.labelShow,
formatter: (val: any) => {
let newStr = ''
const str = val.data.name.split('')
for (let i = 0, s; (s = str[i++]); ) {
newStr += s
if (!(i % 10)) newStr += '\n'
}
return newStr.length > 60 ? newStr.slice(0, 60) + '...' : newStr
}
},
data: props.seriesData.map((item) => {
return {
name: item.name,
id: item.id,
category: getCategory(
Number(item.schedulePublishStatus),
Number(item.workFlowPublishStatus)
),
workFlowPublishStatus: format(
new Date(item.workFlowPublishStatus),
'yyyy-MM-dd HH:mm:ss'
),
schedulePublishStatus: format(
new Date(item.schedulePublishStatus),
'yyyy-MM-dd HH:mm:ss'
),
crontab: item.crontab,
scheduleStartTime:
Number(item.scheduleStartTime) === 0
? t('project.workflow.offline')
: t('project.workflow.online'),
scheduleEndTime:
Number(item.scheduleEndTime) === 0
? t('project.workflow.offline')
: t('project.workflow.online')
}
}),
categories: legendData
}
]
}
initChart(graphChartRef, option)
return { graphChartRef }
},
render() {
const { height, width } = this
return (
<div
ref='graphChartRef'
style={{
height: typeof height === 'number' ? height + 'px' : height,
width: typeof width === 'number' ? width + 'px' : width
}}
/>
)
}
})
export default GraphChart
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,591 | [Feature][python] Add pre-commit to run basic test before commit | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Some of the tests could be run locally before users submit to the remote branch. I think [pre-commit](https://pre-commit.com) is a good component to do that, it will make our workflow more easier
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8591 | https://github.com/apache/dolphinscheduler/pull/8843 | 13b1ffe12ac2d87262a6f0f2cf2bc082b945f47e | a308fa3bef42da19c62c81abc83845f89c3f6d81 | "2022-03-01T03:24:07Z" | java | "2022-03-14T12:51:55Z" | dolphinscheduler-python/pydolphinscheduler/.flake8 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
[flake8]
max-line-length = 110
exclude =
.git,
__pycache__,
.pytest_cache,
*.egg-info,
docs/source/conf.py
old,
build,
dist,
htmlcov,
.tox,
dist,
ignore =
# It's clear and not need to add docstring
D107, # D107: Don't require docstrings on __init__
D105, # D105: Missing docstring in magic method
# Conflict to Black
W503 # W503: Line breaks before binary operators
per-file-ignores =
src/pydolphinscheduler/side/__init__.py:F401
src/pydolphinscheduler/tasks/__init__.py:F401
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,591 | [Feature][python] Add pre-commit to run basic test before commit | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Some of the tests could be run locally before users submit to the remote branch. I think [pre-commit](https://pre-commit.com) is a good component to do that, it will make our workflow more easier
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8591 | https://github.com/apache/dolphinscheduler/pull/8843 | 13b1ffe12ac2d87262a6f0f2cf2bc082b945f47e | a308fa3bef42da19c62c81abc83845f89c3f6d81 | "2022-03-01T03:24:07Z" | java | "2022-03-14T12:51:55Z" | dolphinscheduler-python/pydolphinscheduler/.pre-commit-config.yaml | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,591 | [Feature][python] Add pre-commit to run basic test before commit | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
Some of the tests could be run locally before users submit to the remote branch. I think [pre-commit](https://pre-commit.com) is a good component to do that, it will make our workflow more easier
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8591 | https://github.com/apache/dolphinscheduler/pull/8843 | 13b1ffe12ac2d87262a6f0f2cf2bc082b945f47e | a308fa3bef42da19c62c81abc83845f89c3f6d81 | "2022-03-01T03:24:07Z" | java | "2022-03-14T12:51:55Z" | dolphinscheduler-python/pydolphinscheduler/DEVELOP.md | <!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
# Develop
pydolphinscheduler is python API for Apache DolphinScheduler, it just defines what workflow look like instead of
store or execute it. We here use [py4j][py4j] to dynamically access Java Virtual Machine.
## Setup Develop Environment
**PyDolphinScheduler** use GitHub to hold all source code, you should clone the code before you do same change.
```shell
git clone git@github.com:apache/dolphinscheduler.git
```
Now, we should install all dependence to make sure we could run test or check code style locally
```shell
cd dolphinscheduler/dolphinscheduler-python/pydolphinscheduler
pip install .[dev]
```
Next, we have to open pydolphinscheduler project in you editor. We recommend you use [pycharm][pycharm]
instead of [IntelliJ IDEA][idea] to open it. And you could just open directory
`dolphinscheduler-python/pydolphinscheduler` instead of `dolphinscheduler-python`.
## Brief Concept
Apache DolphinScheduler is design to define workflow by UI, and pydolphinscheduler try to define it by code. When
define by code, user usually do not care user, tenant, or queue exists or not. All user care about is created
a new workflow by the code his/her definition. So we have some **side object** in `pydolphinscheduler/side`
directory, their only check object exists or not, and create them if not exists.
### Process Definition
pydolphinscheduler workflow object name, process definition is also same name as Java object(maybe would be change to
other word for more simple).
### Tasks
pydolphinscheduler tasks object, we use tasks to define exact job we want DolphinScheduler do for us. For now,
we only support `shell` task to execute shell task. [This link][all-task] list all tasks support in DolphinScheduler
and would be implemented in the further.
## Test Your Code
Linting and tests is very important for open source project, so we pay more attention to it. We have continuous
integration service run by GitHub Action to test whether the patch is good or not, which you could jump to
section [With GitHub Action](#with-github-action) see more detail.
And to make more convenience to local tests, we also have the way to run your [test automated with tox](#automated-testing-with-tox)
locally. It is helpful when your try to find out the detail when continuous integration in GitHub Action failed,
or you have a great patch and want to test local first.
Besides [automated testing with tox](#automated-testing-with-tox) locally, we also have a [manual way](#manually)
run tests. And it is scattered commands to reproduce each step of the integration test we told about.
* Remote
* [With GitHub Action](#with-github-action)
* Local
* [Automated Testing With tox](#automated-testing-with-tox)
* [Manually](#manually)
### With GitHub Action
GitHub Action test in various environment for pydolphinscheduler, including different python version in
`3.6|3.7|3.8|3.9` and operating system `linux|macOS|windows`. It will trigger and run automatically when you
submit pull requests to `apache/dolphinscheduler`.
### Automated Testing With tox
[tox](https://tox.wiki) is a package aims to automate and standardize testing in Python, both our continuous
integration and local test use it to run actual task. To use it, you should install it first
```shell
python -m pip install --upgrade tox
```
After installation, you could run a single command to run all the tests, it is almost like test in GitHub Action
but not so much different environment.
```shell
tox -e local-ci
```
It will take a while when you run it the first time, because it has to install dependencies and make some prepare,
and the next time you run it will be faster.
### Manually
#### Code Style
We use [isort][isort] to automatically keep Python imports alphabetically, and use [Black][black] for code
formatter and [Flake8][flake8] for pep8 checker. If you use [pycharm][pycharm]or [IntelliJ IDEA][idea],
maybe you could follow [Black-integration][black-editor] to configure them in your environment.
Our Python API CI would automatically run code style checker and unittest when you submit pull request in
GitHub, you could also run static check locally.
```shell
# We recommend you run isort and Black before Flake8, because Black could auto fix some code style issue
# but Flake8 just hint when code style not match pep8
# Run Isort
python -m isort .
# Run Black
python -m black .
# Run Flake8
python -m flake8
```
#### Testing
pydolphinscheduler using [pytest][pytest] to run all tests in directory `tests`. You could run tests by the commands
```shell
python -m pytest --cov=pydolphinscheduler --cov-config=.coveragerc tests/
```
Besides run tests, it will also check the unit test [coverage][coverage] threshold, for now when test cover less than 90%
will fail the coverage, as well as our GitHub Action.
The command above will check test coverage automatically, and you could also test the coverage by command.
```shell
python -m coverage run && python -m coverage report
```
It would not only run unit test but also show each file coverage which cover rate less than 100%, and `TOTAL`
line show you total coverage of you code. If your CI failed with coverage you could go and find some reason by
this command output.
## Add LICENSE When New Dependencies Adding
When you add a new package in pydolphinscheduler, you should also add the package's LICENSE to directory
`dolphinscheduler-dist/release-docs/licenses/python-api-licenses`, and also add a short description to
`dolphinscheduler-dist/release-docs/LICENSE`.
## Update `UPDATING.md` when public class, method or interface is be changed
When you change public class, method or interface, you should change the [UPDATING.md](./UPDATING.md) to notice
users who may use it in other way.
<!-- content -->
[py4j]: https://www.py4j.org/index.html
[pycharm]: https://www.jetbrains.com/pycharm
[idea]: https://www.jetbrains.com/idea/
[all-task]: https://dolphinscheduler.apache.org/en-us/docs/dev/user_doc/guide/task/shell.html
[pytest]: https://docs.pytest.org/en/latest/
[black]: https://black.readthedocs.io/en/stable/index.html
[flake8]: https://flake8.pycqa.org/en/latest/index.html
[black-editor]: https://black.readthedocs.io/en/stable/integrations/editors.html#pycharm-intellij-idea
[coverage]: https://coverage.readthedocs.io/en/stable/
[isort]: https://pycqa.github.io/isort/index.html
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,530 | [Feature][UI] Custom timezone | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
1. Add the timezone selete on the right header, when user change timezone info, call the updateUser interface to update.
![image](https://user-images.githubusercontent.com/11962619/155515796-3f7597ca-c694-4c08-a98f-858a8d335654.png)
2. DAG scheduler time optimization.
![e65bb495e126392dcf7a1eb594fe295](https://user-images.githubusercontent.com/11962619/155515887-adfa55a3-bad8-4a4f-864c-d5294a57eb7a.png)
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8530 | https://github.com/apache/dolphinscheduler/pull/8885 | a308fa3bef42da19c62c81abc83845f89c3f6d81 | d19322dc9e1c80c6c3f5d46b46044c4fa4d140e1 | "2022-02-24T11:29:31Z" | java | "2022-03-14T14:14:19Z" | dolphinscheduler-ui-next/src/views/projects/workflow/definition/timing/use-table.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { h, ref, reactive } from 'vue'
import { useI18n } from 'vue-i18n'
import { useRouter } from 'vue-router'
import { NSpace, NTooltip, NButton, NPopconfirm, NEllipsis } from 'naive-ui'
import {
deleteScheduleById,
offline,
online,
queryScheduleListPaging
} from '@/service/modules/schedules'
import {
ArrowDownOutlined,
ArrowUpOutlined,
DeleteOutlined,
EditOutlined
} from '@vicons/antd'
import type { Router } from 'vue-router'
import { ISearchParam } from './types'
import styles from '../index.module.scss'
export function useTable() {
const { t } = useI18n()
const router: Router = useRouter()
const variables = reactive({
columns: [],
row: {},
tableData: [],
projectCode: ref(Number(router.currentRoute.value.params.projectCode)),
page: ref(1),
pageSize: ref(10),
searchVal: ref(),
totalPage: ref(1),
showRef: ref(false)
})
const createColumns = (variables: any) => {
variables.columns = [
{
title: '#',
key: 'id',
width: 50,
render: (row: any, index: number) => index + 1
},
{
title: t('project.workflow.workflow_name'),
key: 'processDefinitionName',
width: 200,
render: (row: any) =>
h(
NEllipsis,
{ style: 'max-width: 200px' },
{
default: () => row.processDefinitionName
}
)
},
{
title: t('project.workflow.start_time'),
key: 'startTime'
},
{
title: t('project.workflow.end_time'),
key: 'endTime'
},
{
title: t('project.workflow.crontab'),
key: 'crontab'
},
{
title: t('project.workflow.failure_strategy'),
key: 'failureStrategy'
},
{
title: t('project.workflow.status'),
key: 'releaseState',
render: (row: any) =>
row.releaseState === 'ONLINE'
? t('project.workflow.up_line')
: t('project.workflow.down_line')
},
{
title: t('project.workflow.create_time'),
key: 'createTime'
},
{
title: t('project.workflow.update_time'),
key: 'updateTime'
},
{
title: t('project.workflow.operation'),
key: 'operation',
fixed: 'right',
className: styles.operation,
render: (row: any) => {
return h(NSpace, null, {
default: () => [
h(
NTooltip,
{},
{
trigger: () =>
h(
NButton,
{
circle: true,
type: 'info',
size: 'small',
disabled: row.releaseState === 'ONLINE',
onClick: () => {
handleEdit(row)
}
},
{
icon: () => h(EditOutlined)
}
),
default: () => t('project.workflow.edit')
}
),
h(
NTooltip,
{},
{
trigger: () =>
h(
NButton,
{
circle: true,
type:
row.releaseState === 'ONLINE' ? 'error' : 'warning',
size: 'small',
onClick: () => {
handleReleaseState(row)
}
},
{
icon: () =>
h(
row.releaseState === 'ONLINE'
? ArrowDownOutlined
: ArrowUpOutlined
)
}
),
default: () =>
row.releaseState === 'ONLINE'
? t('project.workflow.down_line')
: t('project.workflow.up_line')
}
),
h(
NPopconfirm,
{
onPositiveClick: () => {
handleDelete(row.id)
}
},
{
trigger: () =>
h(
NTooltip,
{},
{
trigger: () =>
h(
NButton,
{
circle: true,
type: 'error',
size: 'small'
},
{
icon: () => h(DeleteOutlined)
}
),
default: () => t('project.workflow.delete')
}
),
default: () => t('project.workflow.delete_confirm')
}
)
]
})
}
}
]
}
const handleEdit = (row: any) => {
variables.showRef = true
variables.row = row
}
const getTableData = (params: ISearchParam) => {
const definitionCode = Number(
router.currentRoute.value.params.definitionCode
)
queryScheduleListPaging(
{ ...params, processDefinitionCode: definitionCode },
variables.projectCode
).then((res: any) => {
variables.totalPage = res.totalPage
variables.tableData = res.totalList.map((item: any) => {
return { ...item }
})
})
}
const handleReleaseState = (row: any) => {
let handle = online
if (row.releaseState === 'ONLINE') {
handle = offline
}
handle(variables.projectCode, row.id).then(() => {
window.$message.success(t('project.workflow.success'))
getTableData({
pageSize: variables.pageSize,
pageNo: variables.page,
searchVal: variables.searchVal
})
})
}
const handleDelete = (id: number) => {
/* after deleting data from the current page, you need to jump forward when the page is empty. */
if (variables.tableData.length === 1 && variables.page > 1) {
variables.page -= 1
}
deleteScheduleById(id, variables.projectCode).then(() => {
window.$message.success(t('project.workflow.success'))
getTableData({
pageSize: variables.pageSize,
pageNo: variables.page,
searchVal: variables.searchVal
})
})
}
return {
variables,
createColumns,
getTableData
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,836 | [Bug][UI Next][V1.0.0-Alpha] create directory/file path wrong | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![](https://vip2.loli.io/2022/03/11/qdZsCHgkaFJrmY3.png)
### What you expected to happen
above.
### How to reproduce
create index folder ```test2``` in root path. And then entering the ```test2``` folder. And then create folder ```test1```. And then returning to the root path. And then create folder ```test2``` success.
In the same path can not contain two identical file/folder.
There are two issues:
1. Send request path error while creating folder
2. Folder hierarchy display error
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8836 | https://github.com/apache/dolphinscheduler/pull/8892 | db64dcade8446267bce7423940f133492c07eaac | c7e80e42d79f56e237d296d430386860724588ff | "2022-03-11T08:33:58Z" | java | "2022-03-15T06:19:19Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
import static org.apache.dolphinscheduler.api.enums.Status.AUTHORIZED_FILE_RESOURCE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.AUTHORIZED_UDF_FUNCTION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.AUTHORIZE_RESOURCE_TREE;
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_RESOURCE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_RESOURCE_FILE_ON_LINE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_UDF_FUNCTION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_RESOURCE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_UDF_FUNCTION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DOWNLOAD_RESOURCE_FILE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.EDIT_RESOURCE_FILE_ON_LINE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_DATASOURCE_BY_TYPE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_RESOURCES_LIST_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_RESOURCES_LIST_PAGING;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_UDF_FUNCTION_LIST_PAGING_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.RESOURCE_FILE_IS_EMPTY;
import static org.apache.dolphinscheduler.api.enums.Status.RESOURCE_NOT_EXIST;
import static org.apache.dolphinscheduler.api.enums.Status.UNAUTHORIZED_UDF_FUNCTION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_RESOURCE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_UDF_FUNCTION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_UDF_FUNCTION_NAME_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.VIEW_RESOURCE_FILE_ON_LINE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.VIEW_UDF_FUNCTION_ERROR;
import org.apache.dolphinscheduler.api.aspect.AccessLogAnnotation;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.ResourcesService;
import org.apache.dolphinscheduler.api.service.UdfFuncService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ProgramType;
import org.apache.dolphinscheduler.spi.enums.ResourceType;
import org.apache.dolphinscheduler.common.enums.UdfType;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.commons.lang.StringUtils;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import springfox.documentation.annotations.ApiIgnore;
/**
 * resources controller
 *
 * <p>REST layer for file/UDF resources: all business logic is delegated to
 * {@link ResourcesService} and {@link UdfFuncService}; this class only binds
 * HTTP parameters, validates trivially-checkable input (paging, empty content)
 * and maps service results to {@link Result} payloads.</p>
 */
@Api(tags = "RESOURCES_TAG")
@RestController
@RequestMapping("resources")
public class ResourcesController extends BaseController {

    private static final Logger logger = LoggerFactory.getLogger(ResourcesController.class);

    @Autowired
    private ResourcesService resourceService;

    @Autowired
    private UdfFuncService udfFuncService;

    /**
     * create directory
     *
     * @param loginUser login user
     * @param type type
     * @param alias alias
     * @param description description
     * @param pid parent id
     * @param currentDir current directory
     * @return create result code
     */
    @ApiOperation(value = "createDirctory", notes = "CREATE_RESOURCE_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"),
        @ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = true, dataType = "String"),
        @ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType = "String"),
        @ApiImplicitParam(name = "pid", value = "RESOURCE_PID", required = true, dataType = "Int", example = "10"),
        @ApiImplicitParam(name = "currentDir", value = "RESOURCE_CURRENTDIR", required = true, dataType = "String")
    })
    @PostMapping(value = "/directory")
    @ApiException(CREATE_RESOURCE_ERROR)
    @AccessLogAnnotation(ignoreRequestArgs = "loginUser")
    public Result createDirectory(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                  @RequestParam(value = "type") ResourceType type,
                                  @RequestParam(value = "name") String alias,
                                  @RequestParam(value = "description", required = false) String description,
                                  @RequestParam(value = "pid") int pid,
                                  @RequestParam(value = "currentDir") String currentDir) {
        return resourceService.createDirectory(loginUser, alias, description, type, pid, currentDir);
    }

    /**
     * create resource
     *
     * @param loginUser login user
     * @param type resource type
     * @param alias resource name
     * @param description description
     * @param file uploaded resource file
     * @param pid parent id
     * @param currentDir current directory
     * @return create result code
     */
    @ApiOperation(value = "createResource", notes = "CREATE_RESOURCE_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"),
        @ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = true, dataType = "String"),
        @ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType = "String"),
        @ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true, dataType = "MultipartFile"),
        @ApiImplicitParam(name = "pid", value = "RESOURCE_PID", required = true, dataType = "Int", example = "10"),
        @ApiImplicitParam(name = "currentDir", value = "RESOURCE_CURRENTDIR", required = true, dataType = "String")
    })
    @PostMapping()
    @ApiException(CREATE_RESOURCE_ERROR)
    @AccessLogAnnotation(ignoreRequestArgs = "loginUser")
    public Result createResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                 @RequestParam(value = "type") ResourceType type,
                                 @RequestParam(value = "name") String alias,
                                 @RequestParam(value = "description", required = false) String description,
                                 @RequestParam("file") MultipartFile file,
                                 @RequestParam(value = "pid") int pid,
                                 @RequestParam(value = "currentDir") String currentDir) {
        return resourceService.createResource(loginUser, alias, description, type, file, pid, currentDir);
    }

    /**
     * update resource
     *
     * @param loginUser login user
     * @param alias alias
     * @param resourceId resource id
     * @param type resource type
     * @param description description
     * @param file resource file (optional; when absent only metadata is updated)
     * @return update result code
     */
    @ApiOperation(value = "updateResource", notes = "UPDATE_RESOURCE_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100"),
        @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"),
        @ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = true, dataType = "String"),
        @ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType = "String"),
        @ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true, dataType = "MultipartFile")
    })
    @PutMapping(value = "/{id}")
    @ApiException(UPDATE_RESOURCE_ERROR)
    @AccessLogAnnotation(ignoreRequestArgs = "loginUser")
    public Result updateResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                 @PathVariable(value = "id") int resourceId,
                                 @RequestParam(value = "type") ResourceType type,
                                 @RequestParam(value = "name") String alias,
                                 @RequestParam(value = "description", required = false) String description,
                                 @RequestParam(value = "file", required = false) MultipartFile file) {
        return resourceService.updateResource(loginUser, resourceId, alias, description, type, file);
    }

    /**
     * query resources list
     *
     * @param loginUser login user
     * @param type resource type
     * @return resource list
     */
    @ApiOperation(value = "queryResourceList", notes = "QUERY_RESOURCE_LIST_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType")
    })
    @GetMapping(value = "/list")
    @ResponseStatus(HttpStatus.OK)
    @ApiException(QUERY_RESOURCES_LIST_ERROR)
    @AccessLogAnnotation(ignoreRequestArgs = "loginUser")
    public Result queryResourceList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                    @RequestParam(value = "type") ResourceType type
    ) {
        Map<String, Object> result = resourceService.queryResourceList(loginUser, type);
        return returnDataList(result);
    }

    /**
     * query resources list paging
     *
     * @param loginUser login user
     * @param type resource type
     * @param searchVal search value
     * @param pageNo page number
     * @param pageSize page size
     * @return resource list page
     */
    @ApiOperation(value = "queryResourceListPaging", notes = "QUERY_RESOURCE_LIST_PAGING_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"),
        @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "int", example = "10"),
        @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType = "String"),
        @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", required = true, dataType = "Int", example = "1"),
        @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", required = true, dataType = "Int", example = "20")
    })
    @GetMapping()
    @ResponseStatus(HttpStatus.OK)
    @ApiException(QUERY_RESOURCES_LIST_PAGING)
    @AccessLogAnnotation(ignoreRequestArgs = "loginUser")
    public Result queryResourceListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                          @RequestParam(value = "type") ResourceType type,
                                          @RequestParam(value = "id") int id,
                                          @RequestParam("pageNo") Integer pageNo,
                                          @RequestParam(value = "searchVal", required = false) String searchVal,
                                          @RequestParam("pageSize") Integer pageSize
    ) {
        // validate paging parameters before touching the service layer
        Result result = checkPageParams(pageNo, pageSize);
        if (!result.checkResult()) {
            return result;
        }
        // escape user-supplied search text to keep the underlying query safe
        searchVal = ParameterUtils.handleEscapes(searchVal);
        result = resourceService.queryResourceListPaging(loginUser, id, type, searchVal, pageNo, pageSize);
        return result;
    }

    /**
     * delete resource
     *
     * @param loginUser login user
     * @param resourceId resource id
     * @return delete result code
     */
    @ApiOperation(value = "deleteResource", notes = "DELETE_RESOURCE_BY_ID_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100")
    })
    @DeleteMapping(value = "/{id}")
    @ResponseStatus(HttpStatus.OK)
    @ApiException(DELETE_RESOURCE_ERROR)
    @AccessLogAnnotation(ignoreRequestArgs = "loginUser")
    public Result deleteResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                 @PathVariable(value = "id") int resourceId
    ) throws Exception {
        return resourceService.delete(loginUser, resourceId);
    }

    /**
     * verify resource by alias and type
     *
     * @param loginUser login user
     * @param fullName resource full name
     * @param type resource type
     * @return true if the resource name not exists, otherwise return false
     */
    @ApiOperation(value = "verifyResourceName", notes = "VERIFY_RESOURCE_NAME_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"),
        @ApiImplicitParam(name = "fullName", value = "RESOURCE_FULL_NAME", required = true, dataType = "String")
    })
    @GetMapping(value = "/verify-name")
    @ResponseStatus(HttpStatus.OK)
    @ApiException(VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR)
    @AccessLogAnnotation(ignoreRequestArgs = "loginUser")
    public Result verifyResourceName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                     @RequestParam(value = "fullName") String fullName,
                                     @RequestParam(value = "type") ResourceType type
    ) {
        return resourceService.verifyResourceName(fullName, type, loginUser);
    }

    /**
     * query resources by type
     *
     * @param loginUser login user
     * @param type resource type
     * @return resource list
     */
    @ApiOperation(value = "queryResourceByProgramType", notes = "QUERY_RESOURCE_LIST_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType")
    })
    @GetMapping(value = "/query-by-type")
    @ResponseStatus(HttpStatus.OK)
    @ApiException(QUERY_RESOURCES_LIST_ERROR)
    @AccessLogAnnotation(ignoreRequestArgs = "loginUser")
    public Result queryResourceJarList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                       @RequestParam(value = "type") ResourceType type,
                                       @RequestParam(value = "programType", required = false) ProgramType programType
    ) {
        Map<String, Object> result = resourceService.queryResourceByProgramType(loginUser, type, programType);
        return returnDataList(result);
    }

    /**
     * query resource by full name and type
     *
     * @param loginUser login user
     * @param fullName resource full name
     * @param type resource type
     * @param id resource id
     * @return true if the resource name not exists, otherwise return false
     */
    @ApiOperation(value = "queryResource", notes = "QUERY_BY_RESOURCE_NAME")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"),
        @ApiImplicitParam(name = "fullName", value = "RESOURCE_FULL_NAME", required = true, dataType = "String"),
        @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = false, dataType = "Int", example = "10")
    })
    @GetMapping(value = "/{id}")
    @ResponseStatus(HttpStatus.OK)
    @ApiException(RESOURCE_NOT_EXIST)
    @AccessLogAnnotation(ignoreRequestArgs = "loginUser")
    public Result queryResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                @RequestParam(value = "fullName", required = false) String fullName,
                                @PathVariable(value = "id", required = false) Integer id,
                                @RequestParam(value = "type") ResourceType type
    ) {
        // lookup by full name or by id — the service resolves whichever is provided
        return resourceService.queryResource(fullName, id, type);
    }

    /**
     * view resource file online
     *
     * @param loginUser login user
     * @param resourceId resource id
     * @param skipLineNum skip line number
     * @param limit limit
     * @return resource content
     */
    @ApiOperation(value = "viewResource", notes = "VIEW_RESOURCE_BY_ID_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100"),
        @ApiImplicitParam(name = "skipLineNum", value = "SKIP_LINE_NUM", required = true, dataType = "Int", example = "100"),
        @ApiImplicitParam(name = "limit", value = "LIMIT", required = true, dataType = "Int", example = "100")
    })
    @GetMapping(value = "/{id}/view")
    @ApiException(VIEW_RESOURCE_FILE_ON_LINE_ERROR)
    @AccessLogAnnotation(ignoreRequestArgs = "loginUser")
    public Result viewResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                               @PathVariable(value = "id") int resourceId,
                               @RequestParam(value = "skipLineNum") int skipLineNum,
                               @RequestParam(value = "limit") int limit
    ) {
        return resourceService.readResource(resourceId, skipLineNum, limit);
    }

    /**
     * create resource file online
     *
     * @param loginUser login user
     * @param type resource type
     * @param fileName file name (without suffix)
     * @param fileSuffix file suffix
     * @param description description
     * @param content file content; must be non-empty
     * @param pid parent id
     * @param currentDir current directory
     * @return create result code
     */
    @ApiOperation(value = "onlineCreateResource", notes = "ONLINE_CREATE_RESOURCE_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"),
        @ApiImplicitParam(name = "fileName", value = "RESOURCE_NAME", required = true, dataType = "String"),
        @ApiImplicitParam(name = "suffix", value = "SUFFIX", required = true, dataType = "String"),
        @ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType = "String"),
        @ApiImplicitParam(name = "content", value = "CONTENT", required = true, dataType = "String"),
        @ApiImplicitParam(name = "pid", value = "RESOURCE_PID", required = true, dataType = "Int", example = "10"),
        @ApiImplicitParam(name = "currentDir", value = "RESOURCE_CURRENTDIR", required = true, dataType = "String")
    })
    @PostMapping(value = "/online-create")
    @ApiException(CREATE_RESOURCE_FILE_ON_LINE_ERROR)
    @AccessLogAnnotation(ignoreRequestArgs = "loginUser")
    public Result onlineCreateResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                       @RequestParam(value = "type") ResourceType type,
                                       @RequestParam(value = "fileName") String fileName,
                                       @RequestParam(value = "suffix") String fileSuffix,
                                       @RequestParam(value = "description", required = false) String description,
                                       @RequestParam(value = "content") String content,
                                       @RequestParam(value = "pid") int pid,
                                       @RequestParam(value = "currentDir") String currentDir
    ) {
        // online-created files must carry content; reject empty bodies up front
        if (StringUtils.isEmpty(content)) {
            logger.error("resource file contents are not allowed to be empty");
            return error(Status.RESOURCE_FILE_IS_EMPTY.getCode(), RESOURCE_FILE_IS_EMPTY.getMsg());
        }
        return resourceService.onlineCreateResource(loginUser, type, fileName, fileSuffix, description, content, pid, currentDir);
    }

    /**
     * edit resource file online
     *
     * @param loginUser login user
     * @param resourceId resource id
     * @param content content
     * @return update result code
     */
    @ApiOperation(value = "updateResourceContent", notes = "UPDATE_RESOURCE_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100"),
        @ApiImplicitParam(name = "content", value = "CONTENT", required = true, dataType = "String")
    })
    @PutMapping(value = "/{id}/update-content")
    @ApiException(EDIT_RESOURCE_FILE_ON_LINE_ERROR)
    @AccessLogAnnotation(ignoreRequestArgs = "loginUser")
    public Result updateResourceContent(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                        @PathVariable(value = "id") int resourceId,
                                        @RequestParam(value = "content") String content
    ) {
        // same emptiness guard as onlineCreateResource: an edit may not blank the file
        if (StringUtils.isEmpty(content)) {
            logger.error("The resource file contents are not allowed to be empty");
            return error(Status.RESOURCE_FILE_IS_EMPTY.getCode(), RESOURCE_FILE_IS_EMPTY.getMsg());
        }
        return resourceService.updateResourceContent(resourceId, content);
    }

    /**
     * download resource file
     *
     * @param loginUser login user
     * @param resourceId resource id
     * @return resource content
     */
    @ApiOperation(value = "downloadResource", notes = "DOWNLOAD_RESOURCE_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100")
    })
    @GetMapping(value = "/{id}/download")
    @ResponseBody
    @ApiException(DOWNLOAD_RESOURCE_FILE_ERROR)
    @AccessLogAnnotation(ignoreRequestArgs = "loginUser")
    public ResponseEntity downloadResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                           @PathVariable(value = "id") int resourceId) throws Exception {
        Resource file = resourceService.downloadResource(resourceId);
        if (file == null) {
            // null means the service could not locate the resource file
            return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(Status.RESOURCE_NOT_EXIST.getMsg());
        }
        // Content-Disposition triggers a browser download with the original file name
        return ResponseEntity
                .ok()
                .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + file.getFilename() + "\"")
                .body(file);
    }

    /**
     * create udf function
     *
     * @param loginUser login user
     * @param type udf type
     * @param funcName function name
     * @param argTypes argument types
     * @param database database
     * @param description description
     * @param className class name
     * @param resourceId resource id
     * @return create result code
     */
    @ApiOperation(value = "createUdfFunc", notes = "CREATE_UDF_FUNCTION_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "type", value = "UDF_TYPE", required = true, dataType = "UdfType"),
        @ApiImplicitParam(name = "funcName", value = "FUNC_NAME", required = true, dataType = "String"),
        @ApiImplicitParam(name = "className", value = "CLASS_NAME", required = true, dataType = "String"),
        @ApiImplicitParam(name = "argTypes", value = "ARG_TYPES", dataType = "String"),
        @ApiImplicitParam(name = "database", value = "DATABASE_NAME", dataType = "String"),
        @ApiImplicitParam(name = "description", value = "UDF_DESC", dataType = "String"),
        @ApiImplicitParam(name = "resourceId", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100")
    })
    @PostMapping(value = "/{resourceId}/udf-func")
    @ResponseStatus(HttpStatus.CREATED)
    @ApiException(CREATE_UDF_FUNCTION_ERROR)
    @AccessLogAnnotation(ignoreRequestArgs = "loginUser")
    public Result createUdfFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                @RequestParam(value = "type") UdfType type,
                                @RequestParam(value = "funcName") String funcName,
                                @RequestParam(value = "className") String className,
                                @RequestParam(value = "argTypes", required = false) String argTypes,
                                @RequestParam(value = "database", required = false) String database,
                                @RequestParam(value = "description", required = false) String description,
                                @PathVariable(value = "resourceId") int resourceId) {
        return udfFuncService.createUdfFunction(loginUser, funcName, className, argTypes, database, description, type, resourceId);
    }

    /**
     * view udf function
     *
     * @param loginUser login user
     * @param id resource id
     * @return udf function detail
     */
    @ApiOperation(value = "viewUIUdfFunction", notes = "VIEW_UDF_FUNCTION_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100")
    })
    @GetMapping(value = "/{id}/udf-func")
    @ResponseStatus(HttpStatus.OK)
    @ApiException(VIEW_UDF_FUNCTION_ERROR)
    @AccessLogAnnotation(ignoreRequestArgs = "loginUser")
    public Result viewUIUdfFunction(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                    @PathVariable("id") int id) {
        Map<String, Object> map = udfFuncService.queryUdfFuncDetail(id);
        return returnDataList(map);
    }

    /**
     * update udf function
     *
     * @param loginUser login user
     * @param type resource type
     * @param funcName function name
     * @param argTypes argument types
     * @param database data base
     * @param description description
     * @param resourceId resource id
     * @param className class name
     * @param udfFuncId udf function id
     * @return update result code
     */
    @ApiOperation(value = "updateUdfFunc", notes = "UPDATE_UDF_FUNCTION_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "id", value = "UDF_ID", required = true, dataType = "Int"),
        @ApiImplicitParam(name = "type", value = "UDF_TYPE", required = true, dataType = "UdfType"),
        @ApiImplicitParam(name = "funcName", value = "FUNC_NAME", required = true, dataType = "String"),
        @ApiImplicitParam(name = "className", value = "CLASS_NAME", required = true, dataType = "String"),
        @ApiImplicitParam(name = "argTypes", value = "ARG_TYPES", dataType = "String"),
        @ApiImplicitParam(name = "database", value = "DATABASE_NAME", dataType = "String"),
        @ApiImplicitParam(name = "description", value = "UDF_DESC", dataType = "String"),
        @ApiImplicitParam(name = "resourceId", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100")
    })
    @PutMapping(value = "/{resourceId}/udf-func/{id}")
    @ApiException(UPDATE_UDF_FUNCTION_ERROR)
    @AccessLogAnnotation(ignoreRequestArgs = "loginUser")
    public Result updateUdfFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                @PathVariable(value = "id") int udfFuncId,
                                @RequestParam(value = "type") UdfType type,
                                @RequestParam(value = "funcName") String funcName,
                                @RequestParam(value = "className") String className,
                                @RequestParam(value = "argTypes", required = false) String argTypes,
                                @RequestParam(value = "database", required = false) String database,
                                @RequestParam(value = "description", required = false) String description,
                                @PathVariable(value = "resourceId") int resourceId) {
        Map<String, Object> result = udfFuncService.updateUdfFunc(udfFuncId, funcName, className, argTypes, database, description, type, resourceId);
        return returnDataList(result);
    }

    /**
     * query udf function list paging
     *
     * @param loginUser login user
     * @param searchVal search value
     * @param pageNo page number
     * @param pageSize page size
     * @return udf function list page
     */
    @ApiOperation(value = "queryUdfFuncListPaging", notes = "QUERY_UDF_FUNCTION_LIST_PAGING_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType = "String"),
        @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", required = true, dataType = "Int", example = "1"),
        @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", required = true, dataType = "Int", example = "20")
    })
    @GetMapping(value = "/udf-func")
    @ResponseStatus(HttpStatus.OK)
    @ApiException(QUERY_UDF_FUNCTION_LIST_PAGING_ERROR)
    @AccessLogAnnotation(ignoreRequestArgs = "loginUser")
    public Result<Object> queryUdfFuncListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                                 @RequestParam("pageNo") Integer pageNo,
                                                 @RequestParam(value = "searchVal", required = false) String searchVal,
                                                 @RequestParam("pageSize") Integer pageSize
    ) {
        // validate paging parameters before touching the service layer
        Result result = checkPageParams(pageNo, pageSize);
        if (!result.checkResult()) {
            return result;
        }
        result = udfFuncService.queryUdfFuncListPaging(loginUser, searchVal, pageNo, pageSize);
        return result;
    }

    /**
     * query udf func list by type
     *
     * @param loginUser login user
     * @param type resource type
     * @return resource list
     */
    @ApiOperation(value = "queryUdfFuncList", notes = "QUERY_UDF_FUNC_LIST_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "type", value = "UDF_TYPE", required = true, dataType = "UdfType")
    })
    @GetMapping(value = "/udf-func/list")
    @ResponseStatus(HttpStatus.OK)
    @ApiException(QUERY_DATASOURCE_BY_TYPE_ERROR)
    @AccessLogAnnotation(ignoreRequestArgs = "loginUser")
    public Result<Object> queryUdfFuncList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                           @RequestParam("type") UdfType type) {
        Map<String, Object> result = udfFuncService.queryUdfFuncList(loginUser, type.ordinal());
        return returnDataList(result);
    }

    /**
     * verify udf function name can use or not
     *
     * @param loginUser login user
     * @param name name
     * @return true if the name can user, otherwise return false
     */
    @ApiOperation(value = "verifyUdfFuncName", notes = "VERIFY_UDF_FUNCTION_NAME_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "name", value = "FUNC_NAME", required = true, dataType = "String")
    })
    @GetMapping(value = "/udf-func/verify-name")
    @ResponseStatus(HttpStatus.OK)
    @ApiException(VERIFY_UDF_FUNCTION_NAME_ERROR)
    @AccessLogAnnotation
    public Result verifyUdfFuncName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                    @RequestParam(value = "name") String name
    ) {
        return udfFuncService.verifyUdfFuncByName(name);
    }

    /**
     * delete udf function
     *
     * @param loginUser login user
     * @param udfFuncId udf function id
     * @return delete result code
     */
    @ApiOperation(value = "deleteUdfFunc", notes = "DELETE_UDF_FUNCTION_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "id", value = "UDF_FUNC_ID", required = true, dataType = "Int", example = "100")
    })
    @DeleteMapping(value = "/udf-func/{id}")
    @ResponseStatus(HttpStatus.OK)
    @ApiException(DELETE_UDF_FUNCTION_ERROR)
    @AccessLogAnnotation
    public Result deleteUdfFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                @PathVariable(value = "id") int udfFuncId
    ) {
        return udfFuncService.delete(udfFuncId);
    }

    /**
     * authorized file resource list
     *
     * @param loginUser login user
     * @param userId user id
     * @return authorized result
     */
    @ApiOperation(value = "authorizedFile", notes = "AUTHORIZED_FILE_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType = "Int", example = "100")
    })
    @GetMapping(value = "/authed-file")
    @ResponseStatus(HttpStatus.CREATED)
    @ApiException(AUTHORIZED_FILE_RESOURCE_ERROR)
    @AccessLogAnnotation
    public Result authorizedFile(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                 @RequestParam("userId") Integer userId) {
        Map<String, Object> result = resourceService.authorizedFile(loginUser, userId);
        return returnDataList(result);
    }

    /**
     * unauthorized file resource list
     *
     * @param loginUser login user
     * @param userId user id
     * @return unauthorized result code
     */
    @ApiOperation(value = "authorizeResourceTree", notes = "AUTHORIZE_RESOURCE_TREE_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType = "Int", example = "100")
    })
    @GetMapping(value = "/authed-resource-tree")
    @ResponseStatus(HttpStatus.CREATED)
    @ApiException(AUTHORIZE_RESOURCE_TREE)
    @AccessLogAnnotation
    public Result authorizeResourceTree(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                        @RequestParam("userId") Integer userId) {
        Map<String, Object> result = resourceService.authorizeResourceTree(loginUser, userId);
        return returnDataList(result);
    }

    /**
     * unauthorized udf function
     *
     * @param loginUser login user
     * @param userId user id
     * @return unauthorized result code
     */
    @ApiOperation(value = "unauthUDFFunc", notes = "UNAUTHORIZED_UDF_FUNC_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType = "Int", example = "100")
    })
    @GetMapping(value = "/unauth-udf-func")
    @ResponseStatus(HttpStatus.CREATED)
    @ApiException(UNAUTHORIZED_UDF_FUNCTION_ERROR)
    @AccessLogAnnotation
    public Result unauthUDFFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                @RequestParam("userId") Integer userId) {
        Map<String, Object> result = resourceService.unauthorizedUDFFunction(loginUser, userId);
        return returnDataList(result);
    }

    /**
     * authorized udf function
     *
     * @param loginUser login user
     * @param userId user id
     * @return authorized result code
     */
    @ApiOperation(value = "authUDFFunc", notes = "AUTHORIZED_UDF_FUNC_NOTES")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType = "Int", example = "100")
    })
    @GetMapping(value = "/authed-udf-func")
    @ResponseStatus(HttpStatus.CREATED)
    @ApiException(AUTHORIZED_UDF_FUNCTION_ERROR)
    @AccessLogAnnotation
    public Result authorizedUDFFunction(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                        @RequestParam("userId") Integer userId) {
        Map<String, Object> result = resourceService.authorizedUDFFunction(loginUser, userId);
        return returnDataList(result);
    }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,836 | [Bug][UI Next][V1.0.0-Alpha] create directory/file path wrong | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![](https://vip2.loli.io/2022/03/11/qdZsCHgkaFJrmY3.png)
### What you expected to happen
above.
### How to reproduce
Create a folder ```test2``` in the root path, then enter the ```test2``` folder and create a folder ```test1```. Then return to the root path and create a folder named ```test2``` again — the creation succeeds, even though a folder with that name already exists in the same path.
The same path must not contain two files/folders with identical names.
There are two issues:
1. The create-folder request is sent with the wrong path
2. The folder hierarchy is displayed incorrectly
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8836 | https://github.com/apache/dolphinscheduler/pull/8892 | db64dcade8446267bce7423940f133492c07eaac | c7e80e42d79f56e237d296d430386860724588ff | "2022-03-11T08:33:58Z" | java | "2022-03-15T06:19:19Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.ProgramType;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.spi.enums.ResourceType;
import java.io.IOException;
import java.util.Map;
import org.springframework.web.multipart.MultipartFile;
/**
 * Resource (file / UDF) management service: directory and file CRUD,
 * online editing, upload/download, and authorization queries.
 * Implementations persist metadata in the database and mirror content
 * on the underlying storage (e.g. HDFS).
 */
public interface ResourcesService {

    /**
     * Create a directory in the resource tree (and on the underlying storage).
     *
     * @param loginUser   login user
     * @param name        directory name (alias)
     * @param description description
     * @param type        resource type (FILE or UDF)
     * @param pid         parent resource id
     * @param currentDir  full path of the parent directory, e.g. "/" or "/a/b"
     * @return create directory result
     */
    Result<Object> createDirectory(User loginUser,
                                   String name,
                                   String description,
                                   ResourceType type,
                                   int pid,
                                   String currentDir);

    /**
     * Create a resource by uploading a file.
     *
     * @param loginUser  login user
     * @param name       resource name (alias); its suffix must match the uploaded file's
     * @param desc       description
     * @param type       resource type (FILE or UDF)
     * @param file       uploaded file content
     * @param pid        parent resource id
     * @param currentDir full path of the directory the file is created in
     * @return create result code
     */
    Result<Object> createResource(User loginUser,
                                  String name,
                                  String desc,
                                  ResourceType type,
                                  MultipartFile file,
                                  int pid,
                                  String currentDir);

    /**
     * Update a resource's name/description and optionally replace its content.
     *
     * @param loginUser  login user
     * @param resourceId resource id
     * @param name       new name (alias)
     * @param desc       new description
     * @param type       resource type
     * @param file       replacement file content, or null to keep the existing content
     * @return update result code
     */
    Result<Object> updateResource(User loginUser,
                                  int resourceId,
                                  String name,
                                  String desc,
                                  ResourceType type,
                                  MultipartFile file);

    /**
     * Paged query of resources inside one directory.
     *
     * @param loginUser   login user
     * @param directoryId id of the directory to list; -1 means the root directory
     * @param type        resource type
     * @param searchVal   search value matched against resource names
     * @param pageNo      page number (1-based)
     * @param pageSize    page size
     * @return resource list page
     */
    Result queryResourceListPaging(User loginUser, int directoryId, ResourceType type, String searchVal, Integer pageNo, Integer pageSize);

    /**
     * Query the full resource tree visible to the user.
     *
     * @param loginUser login user
     * @param type      resource type
     * @return resource list (tree of children under DATA_LIST)
     */
    Map<String, Object> queryResourceList(User loginUser, ResourceType type);

    /**
     * Query resources filtered by the program type that will consume them
     * (e.g. only ".py" files for PYTHON, ".jar" otherwise).
     *
     * @param loginUser   login user
     * @param type        resource type
     * @param programType program type used to select the relevant file suffix; may be null
     * @return resource list
     */
    Map<String, Object> queryResourceByProgramType(User loginUser, ResourceType type, ProgramType programType);

    /**
     * Delete a resource (and, for a directory, all of its children) from both
     * the database and the underlying storage.
     *
     * @param loginUser  login user
     * @param resourceId resource id
     * @return delete result code
     * @throws IOException if the storage deletion fails
     */
    Result<Object> delete(User loginUser, int resourceId) throws IOException;

    /**
     * Verify that a resource name is free in both the database and the storage.
     *
     * @param fullName  resource full name
     * @param type      resource type
     * @param loginUser login user (used to resolve the tenant's storage path)
     * @return SUCCESS if the resource name does not exist yet, otherwise an error status
     */
    Result<Object> verifyResourceName(String fullName, ResourceType type,User loginUser);

    /**
     * Look up a resource by full name, or — when fullName is blank — the
     * parent resource of the resource with the given id.
     *
     * @param fullName resource full name; may be blank when id is supplied
     * @param id       resource id; may be null when fullName is supplied
     * @param type     resource type
     * @return the matching resource (or parent resource) in the result data
     */
    Result<Object> queryResource(String fullName,Integer id,ResourceType type);

    /**
     * View a slice of a resource file online.
     *
     * @param resourceId  resource id
     * @param skipLineNum number of leading lines to skip
     * @param limit       maximum number of lines to return
     * @return resource content
     */
    Result<Object> readResource(int resourceId, int skipLineNum, int limit);

    /**
     * Create a resource file online from text content.
     *
     * @param loginUser        login user
     * @param type             resource type
     * @param fileName         file name (without suffix)
     * @param fileSuffix       file suffix, e.g. "sh"
     * @param desc             description
     * @param content          file content
     * @param pid              parent resource id
     * @param currentDirectory full path of the directory the file is created in
     * @return create result code
     */
    Result<Object> onlineCreateResource(User loginUser, ResourceType type, String fileName, String fileSuffix, String desc, String content,int pid,String currentDirectory);

    /**
     * Replace the content of an existing resource file.
     *
     * @param resourceId resource id
     * @param content    new file content
     * @return update result code
     */
    Result<Object> updateResourceContent(int resourceId, String content);

    /**
     * Download a resource file.
     *
     * @param resourceId resource id
     * @return resource content as a Spring Resource
     * @throws IOException if the file cannot be read from storage
     */
    org.springframework.core.io.Resource downloadResource(int resourceId) throws IOException;

    /**
     * List the resource tree that can be authorized to the given user.
     *
     * @param loginUser login user
     * @param userId    user id
     * @return resource tree under DATA_LIST
     */
    Map<String, Object> authorizeResourceTree(User loginUser, Integer userId);

    /**
     * List files NOT yet authorized to the given user.
     *
     * @param loginUser login user
     * @param userId    user id
     * @return unauthorized result code
     */
    Map<String, Object> unauthorizedFile(User loginUser, Integer userId);

    /**
     * List UDF functions NOT yet authorized to the given user.
     *
     * @param loginUser login user
     * @param userId    user id
     * @return unauthorized result code
     */
    Map<String, Object> unauthorizedUDFFunction(User loginUser, Integer userId);

    /**
     * List UDF functions authorized to the given user.
     *
     * @param loginUser login user
     * @param userId    user id
     * @return authorized result code
     */
    Map<String, Object> authorizedUDFFunction(User loginUser, Integer userId);

    /**
     * List files authorized to the given user.
     *
     * @param loginUser login user
     * @param userId    user id
     * @return authorized result
     */
    Map<String, Object> authorizedFile(User loginUser, Integer userId);
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,836 | [Bug][UI Next][V1.0.0-Alpha] create directory/file path wrong | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![](https://vip2.loli.io/2022/03/11/qdZsCHgkaFJrmY3.png)
### What you expected to happen
above.
### How to reproduce
Create folder ```test2``` in the root path, enter the ```test2``` folder, and create folder ```test1``` inside it. Then return to the root path and create folder ```test2``` again — the creation succeeds even though it should be rejected.
The same path must not contain two files/folders with identical names.
There are two issues:
1. Send request path error while creating folder
2. Folder hierarchy display error
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8836 | https://github.com/apache/dolphinscheduler/pull/8892 | db64dcade8446267bce7423940f133492c07eaac | c7e80e42d79f56e237d296d430386860724588ff | "2022-03-11T08:33:58Z" | java | "2022-03-15T06:19:19Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.common.Constants.ALIAS;
import static org.apache.dolphinscheduler.common.Constants.CONTENT;
import static org.apache.dolphinscheduler.common.Constants.JAR;
import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent;
import org.apache.dolphinscheduler.api.dto.resources.filter.ResourceFilter;
import org.apache.dolphinscheduler.api.dto.resources.visitor.ResourceTreeVisitor;
import org.apache.dolphinscheduler.api.dto.resources.visitor.Visitor;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.service.ResourcesService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.RegexUtils;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ProgramType;
import org.apache.dolphinscheduler.spi.enums.ResourceType;
import org.apache.dolphinscheduler.common.utils.FileUtils;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.ResourcesUser;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ResourceMapper;
import org.apache.dolphinscheduler.dao.mapper.ResourceUserMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.apache.dolphinscheduler.dao.utils.ResourceProcessDefinitionUtils;
import org.apache.commons.beanutils.BeanMap;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.regex.Matcher;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DuplicateKeyException;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.google.common.io.Files;
/**
 * Default {@link ResourcesService} implementation: keeps resource metadata
 * in the database (via MyBatis mappers) and mirrors file/directory content
 * on the underlying storage (HDFS).
 */
@Service
public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesService {

    private static final Logger logger = LoggerFactory.getLogger(ResourcesServiceImpl.class);

    // resource metadata table (files and directories, FILE and UDF types)
    @Autowired
    private ResourceMapper resourcesMapper;

    // UDF function definitions that may reference UDF resources
    @Autowired
    private UdfFuncMapper udfFunctionMapper;

    // tenants own the per-tenant storage directories
    @Autowired
    private TenantMapper tenantMapper;

    @Autowired
    private UserMapper userMapper;

    // resource-to-user authorization relation
    @Autowired
    private ResourceUserMapper resourceUserMapper;

    // used to detect resources still referenced by released process definitions
    @Autowired
    private ProcessDefinitionMapper processDefinitionMapper;
    /**
     * Create a directory in the resource tree and on HDFS.
     * <p>
     * Flow: check that resource upload is enabled, build the full path,
     * verify the parent, insert the metadata row, then create the HDFS
     * directory. Runs in a transaction so a storage failure rolls back
     * the metadata insert.
     *
     * @param loginUser   login user
     * @param name        directory name (alias)
     * @param description description
     * @param type        resource type (FILE or UDF)
     * @param pid         parent resource id
     * @param currentDir  full path of the parent directory, e.g. "/" or "/a/b"
     * @return create directory result
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public Result<Object> createDirectory(User loginUser,
                                          String name,
                                          String description,
                                          ResourceType type,
                                          int pid,
                                          String currentDir) {
        Result<Object> result = checkResourceUploadStartupState();
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            return result;
        }
        // root ("/") needs no extra separator before the name
        // NOTE(review): fullName trusts the client-supplied currentDir; presumably it
        // should be cross-checked against the pid resource's full name (cf. issue #8836) — verify
        String fullName = currentDir.equals("/") ? String.format("%s%s",currentDir,name) : String.format("%s/%s",currentDir,name);
        result = verifyResource(loginUser, type, fullName, pid);
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            return result;
        }
        // reject duplicates early; the DuplicateKeyException catch below covers races
        if (checkResourceExists(fullName, type.ordinal())) {
            logger.error("resource directory {} has exist, can't recreate", fullName);
            putMsg(result, Status.RESOURCE_EXIST);
            return result;
        }
        Date now = new Date();
        // directories have size 0 and isDirectory == true
        Resource resource = new Resource(pid,name,fullName,true,description,name,loginUser.getId(),type,0,now,now);
        try {
            resourcesMapper.insert(resource);
            putMsg(result, Status.SUCCESS);
            // expose the inserted row as a flat map, dropping the synthetic "class" bean property
            Map<Object, Object> dataMap = new BeanMap(resource);
            Map<String, Object> resultMap = new HashMap<>();
            for (Map.Entry<Object, Object> entry: dataMap.entrySet()) {
                if (!"class".equalsIgnoreCase(entry.getKey().toString())) {
                    resultMap.put(entry.getKey().toString(), entry.getValue());
                }
            }
            result.setData(resultMap);
        } catch (DuplicateKeyException e) {
            // concurrent creation of the same path: report "exists" instead of a server error
            logger.error("resource directory {} has exist, can't recreate", fullName);
            putMsg(result, Status.RESOURCE_EXIST);
            return result;
        } catch (Exception e) {
            logger.error("resource already exists, can't recreate ", e);
            throw new ServiceException("resource already exists, can't recreate");
        }
        //create directory in hdfs; throws ServiceException (rolling back the insert) on failure
        createDirectory(loginUser,fullName,type,result);
        return result;
    }
    /**
     * Create a resource by uploading a file: validate, insert the metadata
     * row, then push the file to HDFS. Transactional, so an upload failure
     * (which throws) rolls back the metadata insert.
     *
     * @param loginUser  login user
     * @param name       resource name (alias); its suffix must match the file's
     * @param desc       description
     * @param type       resource type (FILE or UDF)
     * @param file       uploaded file content
     * @param pid        parent resource id
     * @param currentDir full path of the directory the file is created in
     * @return create result code
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public Result<Object> createResource(User loginUser,
                                         String name,
                                         String desc,
                                         ResourceType type,
                                         MultipartFile file,
                                         int pid,
                                         String currentDir) {
        Result<Object> result = checkResourceUploadStartupState();
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            return result;
        }
        result = verifyPid(loginUser, pid);
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            return result;
        }
        // empty file / suffix mismatch / UDF-not-jar / size limit
        result = verifyFile(name, type, file);
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            return result;
        }
        // check resource name exists
        // NOTE(review): as in createDirectory, fullName is derived from the
        // client-supplied currentDir rather than the pid resource — verify (issue #8836)
        String fullName = currentDir.equals("/") ? String.format("%s%s",currentDir,name) : String.format("%s/%s",currentDir,name);
        if (checkResourceExists(fullName, type.ordinal())) {
            logger.error("resource {} has exist, can't recreate", RegexUtils.escapeNRT(name));
            putMsg(result, Status.RESOURCE_EXIST);
            return result;
        }
        Date now = new Date();
        Resource resource = new Resource(pid,name,fullName,false,desc,file.getOriginalFilename(),loginUser.getId(),type,file.getSize(),now,now);
        try {
            resourcesMapper.insert(resource);
            putMsg(result, Status.SUCCESS);
            // expose the inserted row as a flat map, dropping the synthetic "class" bean property
            Map<Object, Object> dataMap = new BeanMap(resource);
            Map<String, Object> resultMap = new HashMap<>();
            for (Map.Entry<Object, Object> entry: dataMap.entrySet()) {
                if (!"class".equalsIgnoreCase(entry.getKey().toString())) {
                    resultMap.put(entry.getKey().toString(), entry.getValue());
                }
            }
            result.setData(resultMap);
        } catch (Exception e) {
            logger.error("resource already exists, can't recreate ", e);
            throw new ServiceException("resource already exists, can't recreate");
        }
        // fail upload: throwing here rolls back the metadata insert above
        if (!upload(loginUser, fullName, file, type)) {
            logger.error("upload resource: {} file: {} failed.", RegexUtils.escapeNRT(name), RegexUtils.escapeNRT(file.getOriginalFilename()));
            putMsg(result, Status.HDFS_OPERATION_ERROR);
            throw new ServiceException(String.format("upload resource: %s file: %s failed.", name, file.getOriginalFilename()));
        }
        return result;
    }
/**
* check resource is exists
*
* @param fullName fullName
* @param type type
* @return true if resource exists
*/
private boolean checkResourceExists(String fullName, int type) {
Boolean existResource = resourcesMapper.existResource(fullName, type);
return existResource == Boolean.TRUE;
}
    /**
     * Update a resource's name/description and optionally replace its content.
     * <p>
     * Order of operations matters here: validate → update DB metadata (and
     * cascade the path rename to children / UDF functions) → move or replace
     * the file on HDFS. Transactional, so a thrown ServiceException rolls back
     * the DB changes.
     *
     * @param loginUser  login user
     * @param resourceId resource id
     * @param name       new name (alias)
     * @param desc       new description
     * @param type       resource type
     * @param file       replacement content, or null to keep existing content
     * @return update result code
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public Result<Object> updateResource(User loginUser,
                                         int resourceId,
                                         String name,
                                         String desc,
                                         ResourceType type,
                                         MultipartFile file) {
        Result<Object> result = checkResourceUploadStartupState();
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            return result;
        }
        Resource resource = resourcesMapper.selectById(resourceId);
        if (resource == null) {
            putMsg(result, Status.RESOURCE_NOT_EXIST);
            return result;
        }
        if (!hasPerm(loginUser, resource.getUserId())) {
            putMsg(result, Status.USER_NO_OPERATION_PERM);
            return result;
        }
        // nothing changed and no new content: no-op
        if (file == null && name.equals(resource.getAlias()) && desc.equals(resource.getDescription())) {
            putMsg(result, Status.SUCCESS);
            return result;
        }
        //check resource already exists
        String originFullName = resource.getFullName();
        String originResourceName = resource.getAlias();
        // new full name = parent path of the original + new name
        String fullName = String.format("%s%s",originFullName.substring(0,originFullName.lastIndexOf("/") + 1),name);
        if (!originResourceName.equals(name) && checkResourceExists(fullName, type.ordinal())) {
            logger.error("resource {} already exists, can't recreate", name);
            putMsg(result, Status.RESOURCE_EXIST);
            return result;
        }
        result = verifyFile(name, type, file);
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            return result;
        }
        // query tenant by user id
        String tenantCode = getTenantCode(resource.getUserId(),result);
        if (StringUtils.isEmpty(tenantCode)) {
            return result;
        }
        // verify whether the resource exists in storage
        // get the path of origin file in storage
        String originHdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(),tenantCode,originFullName);
        try {
            if (!HadoopUtils.getInstance().exists(originHdfsFileName)) {
                logger.error("{} not exist", originHdfsFileName);
                putMsg(result,Status.RESOURCE_NOT_EXIST);
                return result;
            }
        } catch (IOException e) {
            logger.error(e.getMessage(),e);
            throw new ServiceException(Status.HDFS_OPERATION_ERROR);
        }
        if (!resource.isDirectory()) {
            //get the origin file suffix
            String originSuffix = Files.getFileExtension(originFullName);
            String suffix = Files.getFileExtension(fullName);
            boolean suffixIsChanged = false;
            if (StringUtils.isBlank(suffix) && StringUtils.isNotBlank(originSuffix)) {
                suffixIsChanged = true;
            }
            if (StringUtils.isNotBlank(suffix) && !suffix.equals(originSuffix)) {
                suffixIsChanged = true;
            }
            //verify whether suffix is changed
            if (suffixIsChanged) {
                //need verify whether this resource is authorized to other users;
                //changing the suffix would silently break those users' references
                Map<String, Object> columnMap = new HashMap<>();
                columnMap.put("resources_id", resourceId);
                List<ResourcesUser> resourcesUsers = resourceUserMapper.selectByMap(columnMap);
                if (CollectionUtils.isNotEmpty(resourcesUsers)) {
                    List<Integer> userIds = resourcesUsers.stream().map(ResourcesUser::getUserId).collect(Collectors.toList());
                    List<User> users = userMapper.selectBatchIds(userIds);
                    String userNames = users.stream().map(User::getUserName).collect(Collectors.toList()).toString();
                    logger.error("resource is authorized to user {},suffix not allowed to be modified", userNames);
                    putMsg(result,Status.RESOURCE_IS_AUTHORIZED,userNames);
                    return result;
                }
            }
        }
        // updateResource data
        Date now = new Date();
        resource.setAlias(name);
        resource.setFileName(name);
        resource.setFullName(fullName);
        resource.setDescription(desc);
        resource.setUpdateTime(now);
        if (file != null) {
            resource.setSize(file.getSize());
        }
        try {
            resourcesMapper.updateById(resource);
            if (resource.isDirectory()) {
                // renaming a directory: rewrite the path prefix of every descendant
                List<Integer> childrenResource = listAllChildren(resource,false);
                if (CollectionUtils.isNotEmpty(childrenResource)) {
                    // quote the replacement so '$' / '\' in the new name are literal in replaceFirst
                    String matcherFullName = Matcher.quoteReplacement(fullName);
                    List<Resource> childResourceList;
                    Integer[] childResIdArray = childrenResource.toArray(new Integer[childrenResource.size()]);
                    List<Resource> resourceList = resourcesMapper.listResourceByIds(childResIdArray);
                    childResourceList = resourceList.stream().map(t -> {
                        t.setFullName(t.getFullName().replaceFirst(originFullName, matcherFullName));
                        t.setUpdateTime(now);
                        return t;
                    }).collect(Collectors.toList());
                    resourcesMapper.batchUpdateResource(childResourceList);
                    // UDF functions reference resources by path; keep them in sync
                    if (ResourceType.UDF.equals(resource.getType())) {
                        List<UdfFunc> udfFuncs = udfFunctionMapper.listUdfByResourceId(childResIdArray);
                        if (CollectionUtils.isNotEmpty(udfFuncs)) {
                            udfFuncs = udfFuncs.stream().map(t -> {
                                t.setResourceName(t.getResourceName().replaceFirst(originFullName, matcherFullName));
                                t.setUpdateTime(now);
                                return t;
                            }).collect(Collectors.toList());
                            udfFunctionMapper.batchUpdateUdfFunc(udfFuncs);
                        }
                    }
                }
            } else if (ResourceType.UDF.equals(resource.getType())) {
                // renaming a single UDF file: update the functions bound to it
                List<UdfFunc> udfFuncs = udfFunctionMapper.listUdfByResourceId(new Integer[]{resourceId});
                if (CollectionUtils.isNotEmpty(udfFuncs)) {
                    udfFuncs = udfFuncs.stream().map(t -> {
                        t.setResourceName(fullName);
                        t.setUpdateTime(now);
                        return t;
                    }).collect(Collectors.toList());
                    udfFunctionMapper.batchUpdateUdfFunc(udfFuncs);
                }
            }
            putMsg(result, Status.SUCCESS);
            // expose the updated row as a flat map, dropping the synthetic "class" bean property
            Map<Object, Object> dataMap = new BeanMap(resource);
            Map<String, Object> resultMap = new HashMap<>();
            for (Map.Entry<Object, Object> entry: dataMap.entrySet()) {
                if (!Constants.CLASS.equalsIgnoreCase(entry.getKey().toString())) {
                    resultMap.put(entry.getKey().toString(), entry.getValue());
                }
            }
            result.setData(resultMap);
        } catch (Exception e) {
            logger.error(Status.UPDATE_RESOURCE_ERROR.getMsg(), e);
            throw new ServiceException(Status.UPDATE_RESOURCE_ERROR);
        }
        // if name unchanged, return directly without moving on HDFS
        if (originResourceName.equals(name) && file == null) {
            return result;
        }
        if (file != null) {
            // new content supplied: upload to the (possibly new) path, then drop the old file
            // fail upload
            if (!upload(loginUser, fullName, file, type)) {
                logger.error("upload resource: {} file: {} failed.", name, RegexUtils.escapeNRT(file.getOriginalFilename()));
                putMsg(result, Status.HDFS_OPERATION_ERROR);
                throw new ServiceException(String.format("upload resource: %s file: %s failed.", name, file.getOriginalFilename()));
            }
            if (!fullName.equals(originFullName)) {
                try {
                    HadoopUtils.getInstance().delete(originHdfsFileName,false);
                } catch (IOException e) {
                    logger.error(e.getMessage(),e);
                    throw new ServiceException(String.format("delete resource: %s failed.", originFullName));
                }
            }
            return result;
        }
        // rename only: copy (with delete-source) to the new path on HDFS
        // get the path of dest file in hdfs
        String destHdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(),tenantCode,fullName);
        try {
            logger.info("start hdfs copy {} -> {}", originHdfsFileName, destHdfsFileName);
            HadoopUtils.getInstance().copy(originHdfsFileName, destHdfsFileName, true, true);
        } catch (Exception e) {
            logger.error(MessageFormat.format("hdfs copy {0} -> {1} fail", originHdfsFileName, destHdfsFileName), e);
            putMsg(result,Status.HDFS_COPY_FAIL);
            throw new ServiceException(Status.HDFS_COPY_FAIL);
        }
        return result;
    }
private Result<Object> verifyFile(String name, ResourceType type, MultipartFile file) {
Result<Object> result = new Result<>();
putMsg(result, Status.SUCCESS);
if (file != null) {
// file is empty
if (file.isEmpty()) {
logger.error("file is empty: {}", RegexUtils.escapeNRT(file.getOriginalFilename()));
putMsg(result, Status.RESOURCE_FILE_IS_EMPTY);
return result;
}
// file suffix
String fileSuffix = Files.getFileExtension(file.getOriginalFilename());
String nameSuffix = Files.getFileExtension(name);
// determine file suffix
if (!(StringUtils.isNotEmpty(fileSuffix) && fileSuffix.equalsIgnoreCase(nameSuffix))) {
// rename file suffix and original suffix must be consistent
logger.error("rename file suffix and original suffix must be consistent: {}", RegexUtils.escapeNRT(file.getOriginalFilename()));
putMsg(result, Status.RESOURCE_SUFFIX_FORBID_CHANGE);
return result;
}
//If resource type is UDF, only jar packages are allowed to be uploaded, and the suffix must be .jar
if (Constants.UDF.equals(type.name()) && !JAR.equalsIgnoreCase(fileSuffix)) {
logger.error(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg());
putMsg(result, Status.UDF_RESOURCE_SUFFIX_NOT_JAR);
return result;
}
if (file.getSize() > Constants.MAX_FILE_SIZE) {
logger.error("file size is too large: {}", RegexUtils.escapeNRT(file.getOriginalFilename()));
putMsg(result, Status.RESOURCE_SIZE_EXCEED_LIMIT);
return result;
}
}
return result;
}
/**
* query resources list paging
*
* @param loginUser login user
* @param type resource type
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @return resource list page
*/
@Override
public Result queryResourceListPaging(User loginUser, int directoryId, ResourceType type, String searchVal, Integer pageNo, Integer pageSize) {
Result result = new Result();
Page<Resource> page = new Page<>(pageNo, pageSize);
int userId = loginUser.getId();
if (isAdmin(loginUser)) {
userId = 0;
}
if (directoryId != -1) {
Resource directory = resourcesMapper.selectById(directoryId);
if (directory == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
}
List<Integer> resourcesIds = resourceUserMapper.queryResourcesIdListByUserIdAndPerm(userId, 0);
IPage<Resource> resourceIPage = resourcesMapper.queryResourcePaging(page, userId, directoryId, type.ordinal(), searchVal,resourcesIds);
PageInfo<Resource> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotal((int)resourceIPage.getTotal());
pageInfo.setTotalList(resourceIPage.getRecords());
result.setData(pageInfo);
putMsg(result,Status.SUCCESS);
return result;
}
/**
* create directory
* @param loginUser login user
* @param fullName full name
* @param type resource type
* @param result Result
*/
private void createDirectory(User loginUser,String fullName,ResourceType type,Result<Object> result) {
String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode();
String directoryName = HadoopUtils.getHdfsFileName(type,tenantCode,fullName);
String resourceRootPath = HadoopUtils.getHdfsDir(type,tenantCode);
try {
if (!HadoopUtils.getInstance().exists(resourceRootPath)) {
createTenantDirIfNotExists(tenantCode);
}
if (!HadoopUtils.getInstance().mkdir(directoryName)) {
logger.error("create resource directory {} of hdfs failed",directoryName);
putMsg(result,Status.HDFS_OPERATION_ERROR);
throw new ServiceException(String.format("create resource directory: %s failed.", directoryName));
}
} catch (Exception e) {
logger.error("create resource directory {} of hdfs failed",directoryName);
putMsg(result,Status.HDFS_OPERATION_ERROR);
throw new ServiceException(String.format("create resource directory: %s failed.", directoryName));
}
}
    /**
     * Upload a file to HDFS: stage it in a uniquely-named local file, then
     * copy it to the tenant's resource path on HDFS.
     *
     * @param loginUser login user (used to resolve the tenant's storage path)
     * @param fullName  full target path of the resource
     * @param file      uploaded content
     * @param type      resource type (FILE or UDF)
     * @return true on success; false if the suffix check fails or any I/O step throws
     */
    private boolean upload(User loginUser, String fullName, MultipartFile file, ResourceType type) {
        // save to local
        String fileSuffix = Files.getFileExtension(file.getOriginalFilename());
        String nameSuffix = Files.getFileExtension(fullName);
        // determine file suffix: the target name must keep the uploaded file's suffix
        if (!(StringUtils.isNotEmpty(fileSuffix) && fileSuffix.equalsIgnoreCase(nameSuffix))) {
            return false;
        }
        // query tenant
        String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode();
        // random file name, so concurrent uploads cannot collide in the staging area
        String localFilename = FileUtils.getUploadFilename(tenantCode, UUID.randomUUID().toString());
        // save file to hdfs, and delete original file
        String hdfsFilename = HadoopUtils.getHdfsFileName(type,tenantCode,fullName);
        String resourcePath = HadoopUtils.getHdfsDir(type,tenantCode);
        try {
            // if tenant dir not exists, bootstrap it first
            if (!HadoopUtils.getInstance().exists(resourcePath)) {
                createTenantDirIfNotExists(tenantCode);
            }
            org.apache.dolphinscheduler.api.utils.FileUtils.copyInputStreamToFile(file, localFilename);
            // presumably the trailing flags mean (deleteSource, overwrite) — verify against HadoopUtils
            HadoopUtils.getInstance().copyLocalToHdfs(localFilename, hdfsFilename, true, true);
        } catch (Exception e) {
            // best-effort cleanup of the staged local file before reporting failure
            FileUtils.deleteFile(localFilename);
            logger.error(e.getMessage(), e);
            return false;
        }
        return true;
    }
/**
* query resource list
*
* @param loginUser login user
* @param type resource type
* @return resource list
*/
@Override
public Map<String, Object> queryResourceList(User loginUser, ResourceType type) {
Map<String, Object> result = new HashMap<>();
List<Resource> allResourceList = queryAuthoredResourceList(loginUser, type);
Visitor resourceTreeVisitor = new ResourceTreeVisitor(allResourceList);
result.put(Constants.DATA_LIST, resourceTreeVisitor.visit().getChildren());
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query resource list by program type
*
* @param loginUser login user
* @param type resource type
* @return resource list
*/
@Override
public Map<String, Object> queryResourceByProgramType(User loginUser, ResourceType type, ProgramType programType) {
Map<String, Object> result = new HashMap<>();
List<Resource> allResourceList = queryAuthoredResourceList(loginUser, type);
String suffix = ".jar";
if (programType != null) {
switch (programType) {
case JAVA:
case SCALA:
break;
case PYTHON:
suffix = ".py";
break;
default:
}
}
List<Resource> resources = new ResourceFilter(suffix, new ArrayList<>(allResourceList)).filter();
Visitor resourceTreeVisitor = new ResourceTreeVisitor(resources);
result.put(Constants.DATA_LIST, resourceTreeVisitor.visit().getChildren());
putMsg(result, Status.SUCCESS);
return result;
}
    /**
     * Delete a resource and, for a directory, all of its children, from both
     * the database and HDFS. Refuses to delete resources that are bound to
     * UDF functions or referenced by released process definitions.
     *
     * @param loginUser  login user
     * @param resourceId resource id
     * @return delete result code
     * @throws IOException if the HDFS deletion fails
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public Result<Object> delete(User loginUser, int resourceId) throws IOException {
        Result<Object> result = checkResourceUploadStartupState();
        if (!result.getCode().equals(Status.SUCCESS.getCode())) {
            return result;
        }
        // get resource by id
        Resource resource = resourcesMapper.selectById(resourceId);
        if (resource == null) {
            putMsg(result, Status.RESOURCE_NOT_EXIST);
            return result;
        }
        if (!hasPerm(loginUser, resource.getUserId())) {
            putMsg(result, Status.USER_NO_OPERATION_PERM);
            return result;
        }
        String tenantCode = getTenantCode(resource.getUserId(),result);
        if (StringUtils.isEmpty(tenantCode)) {
            return result;
        }
        // get all resource id of process definitions those is released
        List<Map<String, Object>> list = processDefinitionMapper.listResources();
        Map<Integer, Set<Long>> resourceProcessMap = ResourceProcessDefinitionUtils.getResourceProcessDefinitionMap(list);
        Set<Integer> resourceIdSet = resourceProcessMap.keySet();
        // get all children of the resource (including the resource itself)
        List<Integer> allChildren = listAllChildren(resource,true);
        Integer[] needDeleteResourceIdArray = allChildren.toArray(new Integer[allChildren.size()]);
        //if resource type is UDF,need check whether it is bound by UDF function
        if (resource.getType() == (ResourceType.UDF)) {
            List<UdfFunc> udfFuncs = udfFunctionMapper.listUdfByResourceId(needDeleteResourceIdArray);
            if (CollectionUtils.isNotEmpty(udfFuncs)) {
                logger.error("can't be deleted,because it is bound by UDF functions:{}", udfFuncs);
                putMsg(result,Status.UDF_RESOURCE_IS_BOUND,udfFuncs.get(0).getFuncName());
                return result;
            }
        }
        // the parent directory itself may be referenced by a process definition
        if (resourceIdSet.contains(resource.getPid())) {
            logger.error("can't be deleted,because it is used of process definition");
            putMsg(result, Status.RESOURCE_IS_USED);
            return result;
        }
        // NOTE: retainAll mutates the keySet view (and thus resourceProcessMap) in place;
        // after this, resourceIdSet holds only referenced ids that are about to be deleted
        resourceIdSet.retainAll(allChildren);
        if (CollectionUtils.isNotEmpty(resourceIdSet)) {
            logger.error("can't be deleted,because it is used of process definition");
            for (Integer resId : resourceIdSet) {
                logger.error("resource id:{} is used of process definition {}",resId,resourceProcessMap.get(resId));
            }
            putMsg(result, Status.RESOURCE_IS_USED);
            return result;
        }
        // get hdfs file by type
        String hdfsFilename = HadoopUtils.getHdfsFileName(resource.getType(), tenantCode, resource.getFullName());
        //delete data in database: resource rows first, then their authorization links
        resourcesMapper.deleteIds(needDeleteResourceIdArray);
        resourceUserMapper.deleteResourceUserArray(0, needDeleteResourceIdArray);
        //delete file on hdfs (recursively, covering directory contents)
        HadoopUtils.getInstance().delete(hdfsFilename, true);
        putMsg(result, Status.SUCCESS);
        return result;
    }
/**
* verify resource by name and type
* @param loginUser login user
* @param fullName resource full name
* @param type resource type
* @return true if the resource name not exists, otherwise return false
*/
@Override
public Result<Object> verifyResourceName(String fullName, ResourceType type, User loginUser) {
Result<Object> result = new Result<>();
putMsg(result, Status.SUCCESS);
if (checkResourceExists(fullName, type.ordinal())) {
logger.error("resource type:{} name:{} has exist, can't create again.", type, RegexUtils.escapeNRT(fullName));
putMsg(result, Status.RESOURCE_EXIST);
} else {
// query tenant
Tenant tenant = tenantMapper.queryById(loginUser.getTenantId());
if (tenant != null) {
String tenantCode = tenant.getTenantCode();
try {
String hdfsFilename = HadoopUtils.getHdfsFileName(type,tenantCode,fullName);
if (HadoopUtils.getInstance().exists(hdfsFilename)) {
logger.error("resource type:{} name:{} has exist in hdfs {}, can't create again.", type, RegexUtils.escapeNRT(fullName), hdfsFilename);
putMsg(result, Status.RESOURCE_FILE_EXIST,hdfsFilename);
}
} catch (Exception e) {
logger.error(e.getMessage(),e);
putMsg(result,Status.HDFS_OPERATION_ERROR);
}
} else {
putMsg(result,Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST);
}
}
return result;
}
/**
* Query a resource either by its full name, or — when only an id is given —
* return the PARENT of the resource with that id.
*
* @param fullName resource full name; may be blank when id is supplied
* @param id resource id whose parent should be returned; may be null
* @param type resource type
* @return SUCCESS with the resource (or parent) in data, otherwise
* RESOURCE_NOT_EXIST or REQUEST_PARAMS_NOT_VALID_ERROR
*/
@Override
public Result<Object> queryResource(String fullName, Integer id, ResourceType type) {
Result<Object> result = new Result<>();
// at least one of the two lookup keys must be supplied
if (StringUtils.isBlank(fullName) && id == null) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
return result;
}
if (StringUtils.isNotBlank(fullName)) {
// full-name lookup: return the first match of that name and type
List<Resource> resourceList = resourcesMapper.queryResource(fullName,type.ordinal());
if (CollectionUtils.isEmpty(resourceList)) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
putMsg(result, Status.SUCCESS);
result.setData(resourceList.get(0));
} else {
// id lookup: resolve the resource, then return its parent
Resource resource = resourcesMapper.selectById(id);
if (resource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
Resource parentResource = resourcesMapper.selectById(resource.getPid());
if (parentResource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
putMsg(result, Status.SUCCESS);
result.setData(parentResource);
}
return result;
}
/**
* View a slice of a resource file's content online.
*
* @param resourceId resource id
* @param skipLineNum number of leading lines to skip
* @param limit maximum number of lines to return
* @return SUCCESS with a map of {alias, content}; error status when upload is
* disabled, the resource/file is missing, or the suffix is not viewable
*/
@Override
public Result<Object> readResource(int resourceId, int skipLineNum, int limit) {
Result<Object> result = checkResourceUploadStartupState();
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
return result;
}
// get resource by id
Resource resource = resourcesMapper.selectById(resourceId);
if (resource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
//check preview or not by file suffix
String nameSuffix = Files.getFileExtension(resource.getAlias());
String resourceViewSuffixs = FileUtils.getResourceViewSuffixs();
if (StringUtils.isNotEmpty(resourceViewSuffixs)) {
// only suffixes in the configured whitelist may be previewed
List<String> strList = Arrays.asList(resourceViewSuffixs.split(","));
if (!strList.contains(nameSuffix)) {
logger.error("resource suffix {} not support view, resource id {}", nameSuffix, resourceId);
putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW);
return result;
}
}
// resolve tenant of the resource owner; on failure the error is in result
String tenantCode = getTenantCode(resource.getUserId(),result);
if (StringUtils.isEmpty(tenantCode)) {
return result;
}
// hdfs path
String hdfsFileName = HadoopUtils.getHdfsResourceFileName(tenantCode, resource.getFullName());
logger.info("resource hdfs path is {}", hdfsFileName);
try {
if (HadoopUtils.getInstance().exists(hdfsFileName)) {
// read the requested window of lines from storage
List<String> content = HadoopUtils.getInstance().catFile(hdfsFileName, skipLineNum, limit);
putMsg(result, Status.SUCCESS);
Map<String, Object> map = new HashMap<>();
map.put(ALIAS, resource.getAlias());
map.put(CONTENT, String.join("\n", content));
result.setData(map);
} else {
logger.error("read file {} not exist in hdfs", hdfsFileName);
putMsg(result, Status.RESOURCE_FILE_NOT_EXIST,hdfsFileName);
}
} catch (Exception e) {
logger.error("Resource {} read failed", hdfsFileName, e);
putMsg(result, Status.HDFS_OPERATION_ERROR);
}
return result;
}
/**
* Create a resource file online (content typed in the UI): inserts the DB
* row, then uploads the content to the tenant's storage directory.
*
* @param loginUser login user
* @param type resource type
* @param fileName file name (without suffix)
* @param fileSuffix file suffix, must be in the viewable-suffix whitelist
* @param desc description
* @param content file content
* @param pid parent directory id (-1 for root)
* @param currentDir current directory path, "/" for root
* @return SUCCESS with the created resource's fields as a map
* @throws ServiceException when the storage upload fails after the DB insert
* (expected to roll the transaction back)
*/
@Override
@Transactional(rollbackFor = Exception.class)
public Result<Object> onlineCreateResource(User loginUser, ResourceType type, String fileName, String fileSuffix, String desc, String content,int pid,String currentDir) {
Result<Object> result = checkResourceUploadStartupState();
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
return result;
}
//check file suffix
String nameSuffix = fileSuffix.trim();
String resourceViewSuffixs = FileUtils.getResourceViewSuffixs();
if (StringUtils.isNotEmpty(resourceViewSuffixs)) {
List<String> strList = Arrays.asList(resourceViewSuffixs.split(","));
if (!strList.contains(nameSuffix)) {
logger.error("resource suffix {} not support create", nameSuffix);
putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW);
return result;
}
}
// build the full path; avoid a double slash when creating under root
String name = fileName.trim() + "." + nameSuffix;
String fullName = currentDir.equals("/") ? String.format("%s%s",currentDir,name) : String.format("%s/%s",currentDir,name);
// check both name uniqueness and parent-directory permission
result = verifyResource(loginUser, type, fullName, pid);
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
return result;
}
// save data
Date now = new Date();
Resource resource = new Resource(pid,name,fullName,false,desc,name,loginUser.getId(),type,content.getBytes().length,now,now);
resourcesMapper.insert(resource);
putMsg(result, Status.SUCCESS);
// expose the new resource's bean properties, minus the "class" entry
Map<Object, Object> dataMap = new BeanMap(resource);
Map<String, Object> resultMap = new HashMap<>();
for (Map.Entry<Object, Object> entry: dataMap.entrySet()) {
if (!Constants.CLASS.equalsIgnoreCase(entry.getKey().toString())) {
resultMap.put(entry.getKey().toString(), entry.getValue());
}
}
result.setData(resultMap);
// upload after the insert; failure raises and aborts the transaction
String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode();
result = uploadContentToHdfs(fullName, tenantCode, content);
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
throw new ServiceException(result.getMsg());
}
return result;
}
/**
 * Check whether the resource-upload feature is enabled in the configuration.
 *
 * @return SUCCESS when upload is enabled, otherwise HDFS_NOT_STARTUP
 */
private Result<Object> checkResourceUploadStartupState() {
    Result<Object> checkResult = new Result<>();
    boolean uploadEnabled = PropertyUtils.getResUploadStartupState();
    if (uploadEnabled) {
        putMsg(checkResult, Status.SUCCESS);
    } else {
        // resource upload is switched off in the configuration
        logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
        putMsg(checkResult, Status.HDFS_NOT_STARTUP);
    }
    return checkResult;
}
/**
 * Validate both the target resource name and the parent directory before a
 * create operation.
 *
 * @param loginUser login user
 * @param type      resource type
 * @param fullName  resource full name to be created
 * @param pid       parent directory id (-1 for root)
 * @return SUCCESS only when both checks pass, otherwise the first failure
 */
private Result<Object> verifyResource(User loginUser, ResourceType type, String fullName, int pid) {
    // first make sure the target full name is free to use
    Result<Object> nameCheck = verifyResourceName(fullName, type, loginUser);
    boolean nameOk = nameCheck.getCode().equals(Status.SUCCESS.getCode());
    // only verify the parent when the name check passed
    return nameOk ? verifyPid(loginUser, pid) : nameCheck;
}
/**
 * Verify that the parent resource exists and the user may operate on it.
 *
 * @param loginUser login user
 * @param pid       parent resource id; -1 means root, which needs no check
 * @return SUCCESS, PARENT_RESOURCE_NOT_EXIST or USER_NO_OPERATION_PERM
 */
private Result<Object> verifyPid(User loginUser, int pid) {
    Result<Object> verifyResult = new Result<>();
    putMsg(verifyResult, Status.SUCCESS);
    // -1 means the resource lives directly under the root, nothing to verify
    if (pid == -1) {
        return verifyResult;
    }
    Resource parent = resourcesMapper.selectById(pid);
    if (parent == null) {
        putMsg(verifyResult, Status.PARENT_RESOURCE_NOT_EXIST);
    } else if (!hasPerm(loginUser, parent.getUserId())) {
        putMsg(verifyResult, Status.USER_NO_OPERATION_PERM);
    }
    return verifyResult;
}
/**
* Update the content of an existing resource file: refresh size/update time
* in the database, then upload the new content to storage.
*
* @param resourceId resource id
* @param content new file content
* @return SUCCESS, or an error status when upload is disabled, the resource
* is missing, the suffix is not editable, or the tenant is unresolved
* @throws ServiceException when the storage upload fails after the DB update
*/
@Override
@Transactional(rollbackFor = Exception.class)
public Result<Object> updateResourceContent(int resourceId, String content) {
Result<Object> result = checkResourceUploadStartupState();
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
return result;
}
Resource resource = resourcesMapper.selectById(resourceId);
if (resource == null) {
logger.error("read file not exist, resource id {}", resourceId);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
//check can edit by file suffix
String nameSuffix = Files.getFileExtension(resource.getAlias());
String resourceViewSuffixs = FileUtils.getResourceViewSuffixs();
if (StringUtils.isNotEmpty(resourceViewSuffixs)) {
List<String> strList = Arrays.asList(resourceViewSuffixs.split(","));
if (!strList.contains(nameSuffix)) {
logger.error("resource suffix {} not support updateProcessInstance, resource id {}", nameSuffix, resourceId);
putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW);
return result;
}
}
String tenantCode = getTenantCode(resource.getUserId(),result);
if (StringUtils.isEmpty(tenantCode)) {
return result;
}
// NOTE(review): getBytes() uses the platform default charset; the stored
// size may not match the bytes written if the platform is not UTF-8 — confirm
resource.setSize(content.getBytes().length);
resource.setUpdateTime(new Date());
resourcesMapper.updateById(resource);
// upload after the DB update; a failure throws and aborts the transaction
result = uploadContentToHdfs(resource.getFullName(), tenantCode, content);
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
throw new ServiceException(result.getMsg());
}
return result;
}
/**
* Write the given content to a uniquely named local temp file and copy it to
* the tenant's resource directory on storage, replacing any existing file.
*
* @param resourceName resource full name (target path within the tenant dir)
* @param tenantCode tenant code owning the storage directory
* @param content file content to upload
* @return SUCCESS, RESOURCE_NOT_EXIST when the local write fails, or
* HDFS_OPERATION_ERROR when the storage copy fails
*/
private Result<Object> uploadContentToHdfs(String resourceName, String tenantCode, String content) {
Result<Object> result = new Result<>();
String localFilename = "";
String hdfsFileName = "";
try {
// stage the content in a local temp file first
localFilename = FileUtils.getUploadFilename(tenantCode, UUID.randomUUID().toString());
if (!FileUtils.writeContent2File(content, localFilename)) {
// write file fail
logger.error("file {} fail, content is {}", localFilename, RegexUtils.escapeNRT(content));
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
// get resource file hdfs path
hdfsFileName = HadoopUtils.getHdfsResourceFileName(tenantCode, resourceName);
String resourcePath = HadoopUtils.getHdfsResDir(tenantCode);
logger.info("resource hdfs path is {}, resource dir is {}", hdfsFileName, resourcePath);
HadoopUtils hadoopUtils = HadoopUtils.getInstance();
if (!hadoopUtils.exists(resourcePath)) {
// create if tenant dir not exists
createTenantDirIfNotExists(tenantCode);
}
// overwrite semantics: drop any stale copy before uploading
if (hadoopUtils.exists(hdfsFileName)) {
hadoopUtils.delete(hdfsFileName, false);
}
hadoopUtils.copyLocalToHdfs(localFilename, hdfsFileName, true, true);
} catch (Exception e) {
logger.error(e.getMessage(), e);
result.setCode(Status.HDFS_OPERATION_ERROR.getCode());
result.setMsg(String.format("copy %s to hdfs %s fail", localFilename, hdfsFileName));
return result;
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* Copy a resource file from storage to a local download location and expose
* it as a Spring Resource for the HTTP response.
*
* @param resourceId resource id
* @return the downloadable file, or null when the resource does not exist
* @throws IOException when the storage copy fails
* @throws ServiceException when upload is disabled, the resource is a
* directory, or its owner/tenant cannot be resolved
*/
@Override
public org.springframework.core.io.Resource downloadResource(int resourceId) throws IOException {
// if resource upload startup
if (!PropertyUtils.getResUploadStartupState()) {
logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
throw new ServiceException("hdfs not startup");
}
Resource resource = resourcesMapper.selectById(resourceId);
if (resource == null) {
logger.error("download file not exist, resource id {}", resourceId);
return null;
}
// a directory has no single file to stream back
if (resource.isDirectory()) {
logger.error("resource id {} is directory,can't download it", resourceId);
throw new ServiceException("can't download directory");
}
// resolve the owner and tenant: the tenant code determines the storage path
int userId = resource.getUserId();
User user = userMapper.selectById(userId);
if (user == null) {
logger.error("user id {} not exists", userId);
throw new ServiceException(String.format("resource owner id %d not exist",userId));
}
Tenant tenant = tenantMapper.queryById(user.getTenantId());
if (tenant == null) {
logger.error("tenant id {} not exists", user.getTenantId());
throw new ServiceException(String.format("The tenant id %d of resource owner not exist",user.getTenantId()));
}
String tenantCode = tenant.getTenantCode();
String hdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(), tenantCode, resource.getFullName());
String localFileName = FileUtils.getDownloadFilename(resource.getAlias());
logger.info("resource hdfs path is {}, download local filename is {}", hdfsFileName, localFileName);
HadoopUtils.getInstance().copyHdfsToLocal(hdfsFileName, localFileName, false, true);
return org.apache.dolphinscheduler.api.utils.FileUtils.file2Resource(localFileName);
}
/**
 * Build the resource tree that can be granted to the given user. Admins see
 * every resource except those owned by {@code userId}; ordinary users see
 * only their own resources.
 *
 * @param loginUser login user
 * @param userId    target user id
 * @return result map whose DATA_LIST holds the tree children
 */
@Override
public Map<String, Object> authorizeResourceTree(User loginUser, Integer userId) {
    Map<String, Object> result = new HashMap<>();
    // admin gets all resources except userId; others only their own
    List<Resource> candidates = isAdmin(loginUser)
            ? resourcesMapper.queryResourceExceptUserId(userId)
            : resourcesMapper.queryResourceListAuthored(loginUser.getId(), -1);
    List<ResourceComponent> children = CollectionUtils.isEmpty(candidates)
            ? new ArrayList<>(0)
            : new ResourceTreeVisitor(candidates).visit().getChildren();
    result.put(Constants.DATA_LIST, children);
    putMsg(result, Status.SUCCESS);
    return result;
}
/**
 * List the files that are visible to the login user but NOT yet authorized
 * (writable) for the target user, rendered as a resource tree.
 *
 * @param loginUser login user
 * @param userId    target user id
 * @return result map whose DATA_LIST holds the unauthorized tree children
 */
@Override
public Map<String, Object> unauthorizedFile(User loginUser, Integer userId) {
    Map<String, Object> result = new HashMap<>();
    // admin gets all resources except userId; others only their own
    List<Resource> visibleResources = isAdmin(loginUser)
            ? resourcesMapper.queryResourceExceptUserId(userId)
            : resourcesMapper.queryResourceListAuthored(loginUser.getId(), -1);
    List<Resource> unauthorized = new ArrayList<>(0);
    if (visibleResources != null && !visibleResources.isEmpty()) {
        Set<Resource> candidates = new HashSet<>(visibleResources);
        // strip out everything already granted writable permission
        List<Resource> authed = queryResourceList(userId, Constants.AUTHORIZE_WRITABLE_PERM);
        getAuthorizedResourceList(candidates, authed);
        unauthorized = new ArrayList<>(candidates);
    }
    Visitor visitor = new ResourceTreeVisitor(unauthorized);
    result.put(Constants.DATA_LIST, visitor.visit().getChildren());
    putMsg(result, Status.SUCCESS);
    return result;
}
/**
 * List the UDF functions visible to the login user that are NOT yet
 * authorized for the target user.
 *
 * @param loginUser login user
 * @param userId    target user id
 * @return result map whose DATA_LIST holds the unauthorized UDFs
 */
@Override
public Map<String, Object> unauthorizedUDFFunction(User loginUser, Integer userId) {
    Map<String, Object> result = new HashMap<>();
    // admin gets all udfs except userId; others only their own
    List<UdfFunc> visibleUdfs = isAdmin(loginUser)
            ? udfFunctionMapper.queryUdfFuncExceptUserId(userId)
            : udfFunctionMapper.selectByMap(Collections.singletonMap("user_id", loginUser.getId()));
    List<UdfFunc> unauthorized = new ArrayList<>();
    if (CollectionUtils.isNotEmpty(visibleUdfs)) {
        Set<UdfFunc> candidates = new HashSet<>(visibleUdfs);
        // remove every UDF the user already holds
        List<UdfFunc> authed = udfFunctionMapper.queryAuthedUdfFunc(userId);
        getAuthorizedResourceList(candidates, authed);
        unauthorized = new ArrayList<>(candidates);
    }
    result.put(Constants.DATA_LIST, unauthorized);
    putMsg(result, Status.SUCCESS);
    return result;
}
/**
 * List the UDF functions already authorized for the given user.
 *
 * @param loginUser login user
 * @param userId    target user id
 * @return result map whose DATA_LIST holds the authorized UDFs
 */
@Override
public Map<String, Object> authorizedUDFFunction(User loginUser, Integer userId) {
    Map<String, Object> result = new HashMap<>();
    // everything the target user has already been granted
    result.put(Constants.DATA_LIST, udfFunctionMapper.queryAuthedUdfFunc(userId));
    putMsg(result, Status.SUCCESS);
    return result;
}
/**
 * List the files already authorized (writable) to the given user as a
 * resource tree.
 *
 * @param loginUser login user
 * @param userId    target user id
 * @return result map whose DATA_LIST holds the authorized tree children
 */
@Override
public Map<String, Object> authorizedFile(User loginUser, Integer userId) {
    Map<String, Object> result = new HashMap<>();
    List<Resource> authedResources = queryResourceList(userId, Constants.AUTHORIZE_WRITABLE_PERM);
    Visitor visitor = new ResourceTreeVisitor(authedResources);
    // build the tree once and reuse it — the previous code invoked visit()
    // three times, rebuilding the whole tree for each logging/serialization
    ResourceComponent tree = visitor.visit();
    logger.info(JSONUtils.toJsonString(tree, SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS));
    logger.info(JSONUtils.toJsonString(tree.getChildren(), SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS));
    result.put(Constants.DATA_LIST, tree.getChildren());
    putMsg(result, Status.SUCCESS);
    return result;
}
/**
 * Remove every already-authorized entry from the candidate set, leaving only
 * the entries that are still unauthorized. The set is shrunk in place.
 *
 * @param resourceSet        candidate set, modified in place
 * @param authedResourceList entries that are already authorized
 */
private void getAuthorizedResourceList(Set<?> resourceSet, List<?> authedResourceList) {
    if (CollectionUtils.isEmpty(authedResourceList)) {
        return;
    }
    // copy into a set so removeAll can do O(1) membership lookups
    resourceSet.removeAll(new HashSet<>(authedResourceList));
}
/**
 * Resolve the tenant code of the given user, recording the failure reason in
 * {@code result} when the user or tenant cannot be found.
 *
 * @param userId user id
 * @param result result to receive the error status on failure
 * @return the tenant code, or null when user/tenant does not exist
 */
private String getTenantCode(int userId, Result<Object> result) {
    User owner = userMapper.selectById(userId);
    if (owner == null) {
        logger.error("user {} not exists", userId);
        putMsg(result, Status.USER_NOT_EXIST, userId);
        return null;
    }
    Tenant ownerTenant = tenantMapper.queryById(owner.getTenantId());
    if (ownerTenant != null) {
        return ownerTenant.getTenantCode();
    }
    logger.error("tenant not exists");
    putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST);
    return null;
}
/**
 * Collect the ids of all descendants of the given resource.
 *
 * @param resource    starting resource
 * @param containSelf whether to include the resource's own id (never for -1)
 * @return flat list of descendant ids, depth-first
 */
List<Integer> listAllChildren(Resource resource, boolean containSelf) {
    List<Integer> ids = new ArrayList<>();
    boolean includeSelf = containSelf && resource.getId() != -1;
    if (includeSelf) {
        ids.add(resource.getId());
    }
    // only directories can have children worth walking
    if (resource.isDirectory()) {
        listAllChildren(resource.getId(), ids);
    }
    return ids;
}
/**
 * Depth-first walk of the resource tree: append every child id of
 * {@code resourceId} to {@code childList}, recursing into each child.
 *
 * @param resourceId starting resource id
 * @param childList  accumulator for the discovered ids
 */
void listAllChildren(int resourceId, List<Integer> childList) {
    // record each direct child, then descend into it
    for (int childId : resourcesMapper.listChildren(resourceId)) {
        childList.add(childId);
        listAllChildren(childId, childList);
    }
}
/**
 * Query all resources the login user can see: the ones they own plus the
 * ones granted to them via relation records, filtered by resource type.
 *
 * @param loginUser login user
 * @param type      resource type
 * @return combined list of owned and granted resources of the given type
 */
private List<Resource> queryAuthoredResourceList(User loginUser, ResourceType type) {
    boolean admin = isAdmin(loginUser);
    // admins query with userId 0 and have no relation records of their own
    int userId = admin ? 0 : loginUser.getId();
    List<Resource> relationResources = admin ? new ArrayList<>() : queryResourceList(userId, 0);
    // keep only the relation entries matching the requested type
    List<Resource> relationTypeResources = relationResources.stream()
            .filter(rs -> rs.getType() == type)
            .collect(Collectors.toList());
    List<Resource> authored = resourcesMapper.queryResourceListAuthored(userId, type.ordinal());
    authored.addAll(relationTypeResources);
    return authored;
}
/**
 * Query the resources related to a user with the given permission level.
 *
 * @param userId user id
 * @param perm   permission level to match
 * @return the matching resources, or an empty list when none are related
 */
private List<Resource> queryResourceList(Integer userId, int perm) {
    List<Integer> resIds = resourceUserMapper.queryResourcesIdListByUserIdAndPerm(userId, perm);
    if (CollectionUtils.isEmpty(resIds)) {
        return new ArrayList<>();
    }
    return resourcesMapper.queryResourceListById(resIds);
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,836 | [Bug][UI Next][V1.0.0-Alpha] create directory/file path wrong | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![](https://vip2.loli.io/2022/03/11/qdZsCHgkaFJrmY3.png)
### What you expected to happen
above.
### How to reproduce
Create a folder named ```test2``` in the root path, enter the ```test2``` folder and create a folder ```test1```, then return to the root path and create another folder named ```test2``` — the creation succeeds when it should fail.
The same path must not contain two identical files/folders.
There are two issues:
1. Send request path error while creating folder
2. Folder hierarchy display error
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8836 | https://github.com/apache/dolphinscheduler/pull/8892 | db64dcade8446267bce7423940f133492c07eaac | c7e80e42d79f56e237d296d430386860724588ff | "2022-03-11T08:33:58Z" | java | "2022-03-15T06:19:19Z" | dolphinscheduler-ui-next/src/layouts/content/components/timezone/index.module.scss | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* Horizontal spacing around the trigger icon. */
.icon {
margin: 0 12px;
}
/* Select rendered inside the dropdown panel; width follows its content. */
.custom-select {
margin: 10px;
width: auto;
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,836 | [Bug][UI Next][V1.0.0-Alpha] create directory/file path wrong | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![](https://vip2.loli.io/2022/03/11/qdZsCHgkaFJrmY3.png)
### What you expected to happen
above.
### How to reproduce
create index folder ```test2``` in root path. And then entering the ```test2``` folder. And then create folder ```test1```. And then returning to the root path. And then create folder ```test2``` success.
In the same path can not contain two identical file/folder.
There are two issues:
1. Send request path error while creating folder
2. Folder hierarchy display error
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8836 | https://github.com/apache/dolphinscheduler/pull/8892 | db64dcade8446267bce7423940f133492c07eaac | c7e80e42d79f56e237d296d430386860724588ff | "2022-03-11T08:33:58Z" | java | "2022-03-15T06:19:19Z" | dolphinscheduler-ui-next/src/service/modules/resources/index.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { axios, downloadFile } from '@/service/service'
import {
ResourceTypeReq,
NameReq,
FileNameReq,
FullNameReq,
IdReq,
ContentReq,
DescriptionReq,
CreateReq,
UserIdReq,
OnlineCreateReq,
ProgramTypeReq,
ListReq,
ViewResourceReq,
ResourceIdReq,
UdfFuncReq
} from './types'
// GET /resources — paged listing of resources (scoped by id/type params).
export function queryResourceListPaging(
  params: ListReq & IdReq & ResourceTypeReq
): any {
  const requestConfig = {
    url: '/resources',
    method: 'get',
    params
  }
  return axios(requestConfig)
}
// GET /resources/:id — fetch a single resource entry.
export function queryResourceById(
  params: ResourceTypeReq & FullNameReq & IdReq,
  id: number
): any {
  const requestConfig = { url: `/resources/${id}`, method: 'get', params }
  return axios(requestConfig)
}
// POST /resources — create a new resource entry.
export function createResource(
  data: CreateReq & FileNameReq & NameReq & ResourceTypeReq
): any {
  const requestConfig = { url: '/resources', method: 'post', data }
  return axios(requestConfig)
}
// GET /resources/authed-file — files already authorized to the user.
export function authorizedFile(params: UserIdReq): any {
  const requestConfig = { url: '/resources/authed-file', method: 'get', params }
  return axios(requestConfig)
}
// GET /resources/authed-resource-tree — grantable resource tree for a user.
export function authorizeResourceTree(params: UserIdReq): any {
  const requestConfig = {
    url: '/resources/authed-resource-tree',
    method: 'get',
    params
  }
  return axios(requestConfig)
}
// GET /resources/authed-udf-func — UDFs already authorized to the user.
export function authUDFFunc(params: UserIdReq): any {
  const requestConfig = { url: '/resources/authed-udf-func', method: 'get', params }
  return axios(requestConfig)
}
// POST /resources/directory — create a directory resource.
export function createDirectory(
  data: CreateReq & NameReq & ResourceTypeReq
): any {
  const requestConfig = { url: '/resources/directory', method: 'post', data }
  return axios(requestConfig)
}
// GET /resources/list — full (unpaged) resource list of the given type.
export function queryResourceList(params: ResourceTypeReq): any {
  const requestConfig = { url: '/resources/list', method: 'get', params }
  return axios(requestConfig)
}
// POST /resources/online-create — create a file with inline content.
export function onlineCreateResource(
  data: OnlineCreateReq & FileNameReq & ResourceTypeReq
): any {
  const requestConfig = { url: '/resources/online-create', method: 'post', data }
  return axios(requestConfig)
}
// GET /resources/query-by-type — resources filtered by program type.
export function queryResourceByProgramType(
  params: ResourceTypeReq & ProgramTypeReq
): any {
  const requestConfig = { url: '/resources/query-by-type', method: 'get', params }
  return axios(requestConfig)
}
// GET /resources/udf-func — paged UDF function listing.
export function queryUdfFuncListPaging(params: ListReq): any {
  const requestConfig = { url: '/resources/udf-func', method: 'get', params }
  return axios(requestConfig)
}
// GET /resources/udf-func/list — full UDF function list.
export function queryUdfFuncList(params: IdReq & ListReq): any {
  const requestConfig = { url: '/resources/udf-func/list', method: 'get', params }
  return axios(requestConfig)
}
// GET /resources/udf-func/verify-name — check a UDF name is still free.
export function verifyUdfFuncName(params: NameReq): any {
  const requestConfig = {
    url: '/resources/udf-func/verify-name',
    method: 'get',
    params
  }
  return axios(requestConfig)
}
// DELETE /resources/udf-func/:id — remove a UDF function.
export function deleteUdfFunc(id: number): any {
  const requestConfig = { url: `/resources/udf-func/${id}`, method: 'delete' }
  return axios(requestConfig)
}
// GET /resources/unauth-udf-func — UDFs not yet authorized to the user.
export function unAuthUDFFunc(params: UserIdReq): any {
  const requestConfig = { url: '/resources/unauth-udf-func', method: 'get', params }
  return axios(requestConfig)
}
// GET /resources/verify-name — check a resource full name is still free.
export function verifyResourceName(params: FullNameReq & ResourceTypeReq): any {
  const requestConfig = { url: '/resources/verify-name', method: 'get', params }
  return axios(requestConfig)
}
// GET /resources/verify-name/:id — query a resource (or its parent) by name/id.
// NOTE(review): `id` is declared as IdReq (an object) but is interpolated
// straight into the URL template; callers most likely pass a plain number —
// confirm and tighten the annotation to `number`.
export function queryResource(
params: FullNameReq & ResourceTypeReq,
id: IdReq
): any {
return axios({
url: `/resources/verify-name/${id}`,
method: 'get',
params
})
}
// PUT /resources/:id — update a resource's metadata.
export function updateResource(
  data: NameReq & ResourceTypeReq & IdReq & DescriptionReq,
  id: number
): any {
  const requestConfig = { url: `/resources/${id}`, method: 'put', data }
  return axios(requestConfig)
}
// DELETE /resources/:id — remove a resource.
export function deleteResource(id: number): any {
  const requestConfig = { url: `/resources/${id}`, method: 'delete' }
  return axios(requestConfig)
}
// Trigger a browser download of the resource file via the shared helper.
export function downloadResource(id: number): void {
  const downloadUrl = `resources/${id}/download`
  downloadFile(downloadUrl)
}
// GET /resources/:id/udf-func — view the UDF function bound to a resource.
// NOTE(review): `id` is declared as IdReq (an object) yet interpolated into
// the URL; callers most likely pass a plain number — confirm and tighten.
export function viewUIUdfFunction(id: IdReq): any {
return axios({
url: `/resources/${id}/udf-func`,
method: 'get'
})
}
// PUT /resources/:id/update-content — replace a file resource's content.
export function updateResourceContent(data: ContentReq, id: number): any {
  const requestConfig = {
    url: `/resources/${id}/update-content`,
    method: 'put',
    data
  }
  return axios(requestConfig)
}
// GET /resources/:id/view — read a window of a file resource's content.
export function viewResource(params: ViewResourceReq, id: number): any {
  const requestConfig = { url: `/resources/${id}/view`, method: 'get', params }
  return axios(requestConfig)
}
// POST /resources/:resourceId/udf-func — create a UDF bound to a resource.
// NOTE(review): `resourceId` is declared as ResourceIdReq (an object) yet
// interpolated into the URL; callers most likely pass a plain number —
// confirm and tighten the annotation.
export function createUdfFunc(
data: UdfFuncReq,
resourceId: ResourceIdReq
): any {
return axios({
url: `/resources/${resourceId}/udf-func`,
method: 'post',
data
})
}
// PUT /resources/:resourceId/udf-func/:id — update an existing UDF function.
// NOTE(review): `resourceId` is declared as ResourceIdReq (an object) yet
// interpolated into the URL; callers most likely pass a plain number —
// confirm and tighten the annotation.
export function updateUdfFunc(
data: UdfFuncReq,
resourceId: ResourceIdReq,
id: number
): any {
return axios({
url: `/resources/${resourceId}/udf-func/${id}`,
method: 'put',
data
})
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,836 | [Bug][UI Next][V1.0.0-Alpha] create directory/file path wrong | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![](https://vip2.loli.io/2022/03/11/qdZsCHgkaFJrmY3.png)
### What you expected to happen
above.
### How to reproduce
create index folder ```test2``` in root path. And then entering the ```test2``` folder. And then create folder ```test1```. And then returning to the root path. And then create folder ```test2``` success.
In the same path can not contain two identical file/folder.
There are two issues:
1. Send request path error while creating folder
2. Folder hierarchy display error
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8836 | https://github.com/apache/dolphinscheduler/pull/8892 | db64dcade8446267bce7423940f133492c07eaac | c7e80e42d79f56e237d296d430386860724588ff | "2022-03-11T08:33:58Z" | java | "2022-03-15T06:19:19Z" | dolphinscheduler-ui-next/src/service/modules/resources/types.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Payload wrapper for a file upload request.
interface FileReq {
file: any
}
// Selects which resource kind an endpoint operates on.
interface ResourceTypeReq {
type: 'FILE' | 'UDF'
programType?: string
}
// Engine a UDF function targets.
interface UdfTypeReq {
type: 'HIVE' | 'SPARK'
}
// Single-field request fragments, composed via intersection types below.
interface NameReq {
name: string
}
interface FileNameReq {
fileName: string
}
interface FullNameReq {
fullName: string
}
interface IdReq {
id: number
}
interface ContentReq {
content: string
}
interface DescriptionReq {
description?: string
}
// Create request: target directory path plus parent id.
interface CreateReq extends ResourceTypeReq, DescriptionReq {
currentDir: string
pid: number
}
interface UserIdReq {
userId: number
}
// Online-create adds inline content and a file suffix.
interface OnlineCreateReq extends CreateReq, ContentReq {
suffix: string
}
interface ProgramTypeReq {
programType: 'JAVA' | 'SCALA' | 'PYTHON'
}
// Common pagination/search parameters.
interface ListReq {
pageNo: number
pageSize: number
searchVal?: string
}
// Window of lines to read when previewing a file.
interface ViewResourceReq {
limit: number
skipLineNum: number
}
interface ResourceIdReq {
resourceId: number
}
// Create/update payload for a UDF function definition.
interface UdfFuncReq extends UdfTypeReq, DescriptionReq, ResourceIdReq {
className: string
funcName: string
argTypes?: string
database?: string
}
// Resource entry as returned by the backend listing endpoints.
interface ResourceFile {
id: number
pid: number
alias: string
userId: number
type: string
directory: boolean
fileName: string
fullName: string
description: string
size: number
updateTime: string
}
// Paged listing response.
interface ResourceListRes {
currentPage: number
pageSize: number
start: number
total: number
totalList: ResourceFile[]
}
// File-preview response: display name plus raw content.
interface ResourceViewRes {
alias: string
content: string
}
export {
FileReq,
ResourceTypeReq,
UdfTypeReq,
NameReq,
FileNameReq,
FullNameReq,
IdReq,
ContentReq,
DescriptionReq,
CreateReq,
UserIdReq,
OnlineCreateReq,
ProgramTypeReq,
ListReq,
ViewResourceReq,
ResourceIdReq,
UdfFuncReq,
ResourceListRes,
ResourceViewRes
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,836 | [Bug][UI Next][V1.0.0-Alpha] create directory/file path wrong | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![](https://vip2.loli.io/2022/03/11/qdZsCHgkaFJrmY3.png)
### What you expected to happen
above.
### How to reproduce
create index folder ```test2``` in root path. And then entering the ```test2``` folder. And then create folder ```test1```. And then returning to the root path. And then create folder ```test2``` success.
In the same path can not contain two identical file/folder.
There are two issues:
1. Send request path error while creating folder
2. Folder hierarchy display error
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8836 | https://github.com/apache/dolphinscheduler/pull/8892 | db64dcade8446267bce7423940f133492c07eaac | c7e80e42d79f56e237d296d430386860724588ff | "2022-03-11T08:33:58Z" | java | "2022-03-15T06:19:19Z" | dolphinscheduler-ui-next/src/views/resource/file/index.tsx | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { useRouter } from 'vue-router'
import {
defineComponent,
onMounted,
ref,
reactive,
Ref,
watch,
inject
} from 'vue'
import {
NIcon,
NSpace,
NDataTable,
NButtonGroup,
NButton,
NPagination,
NInput
} from 'naive-ui'
import { useI18n } from 'vue-i18n'
import { SearchOutlined } from '@vicons/antd'
import Card from '@/components/card'
import { useTable } from './table/use-table'
import { useFileState } from './use-file'
import ResourceFolderModal from './folder'
import ResourceUploadModal from './upload'
import ResourceRenameModal from './rename'
import { IRenameFile } from './types'
import type { Router } from 'vue-router'
import styles from './index.module.scss'
export default defineComponent({
name: 'File',
inject: ['reload'],
setup() {
const router: Router = useRouter()
const fileId = ref(Number(router.currentRoute.value.params.id) || -1)
const reload: any = inject('reload')
const resourceListRef = ref()
const folderShowRef = ref(false)
const uploadShowRef = ref(false)
const renameShowRef = ref(false)
const serachRef = ref()
const renameInfo = reactive({
id: -1,
name: '',
description: ''
})
const paginationReactive = reactive({
page: 1,
pageSize: 10,
itemCount: 0,
pageSizes: [10, 30, 50]
})
const handleUpdatePage = (page: number) => {
paginationReactive.page = page
resourceListRef.value = getResourceListState(
fileId.value,
serachRef.value,
paginationReactive.page,
paginationReactive.pageSize
)
}
const handleUpdatePageSize = (pageSize: number) => {
paginationReactive.page = 1
paginationReactive.pageSize = pageSize
resourceListRef.value = getResourceListState(
fileId.value,
serachRef.value,
paginationReactive.page,
paginationReactive.pageSize
)
}
const handleShowModal = (showRef: Ref<Boolean>) => {
showRef.value = true
}
const setPagination = (count: number) => {
paginationReactive.itemCount = count
}
const { getResourceListState } = useFileState(setPagination)
const handleConditions = () => {
resourceListRef.value = getResourceListState(
fileId.value,
serachRef.value
)
}
const handleCreateFolder = () => {
handleShowModal(folderShowRef)
}
const handleCreateFile = () => {
const name = fileId.value
? 'resource-subfile-create'
: 'resource-file-create'
router.push({
name,
params: { id: fileId.value }
})
}
const handleUploadFile = () => {
handleShowModal(uploadShowRef)
}
const handleRenameFile: IRenameFile = (id, name, description) => {
renameInfo.id = id
renameInfo.name = name
renameInfo.description = description
handleShowModal(renameShowRef)
}
const updateList = () => {
resourceListRef.value = getResourceListState(
fileId.value,
serachRef.value
)
}
onMounted(() => {
resourceListRef.value = getResourceListState(fileId.value)
})
watch(
() => router.currentRoute.value.params.id,
// @ts-ignore
() => reload()
)
return {
fileId,
serachRef,
folderShowRef,
uploadShowRef,
renameShowRef,
handleShowModal,
resourceListRef,
updateList,
handleConditions,
handleCreateFolder,
handleCreateFile,
handleUploadFile,
handleRenameFile,
handleUpdatePage,
handleUpdatePageSize,
pagination: paginationReactive,
renameInfo
}
},
render() {
const { t } = useI18n()
const { columnsRef } = useTable(this.handleRenameFile, this.updateList)
const {
handleConditions,
handleCreateFolder,
handleCreateFile,
handleUploadFile
} = this
return (
<div>
<Card style={{ marginBottom: '8px' }}>
<div class={styles['conditions-model']}>
<NSpace>
<NButtonGroup>
<NButton
onClick={handleCreateFolder}
class='btn-create-directory'
>
{t('resource.file.create_folder')}
</NButton>
<NButton onClick={handleCreateFile} class='btn-create-file'>
{t('resource.file.create_file')}
</NButton>
<NButton onClick={handleUploadFile} class='btn-upload-file'>
{t('resource.file.upload_files')}
</NButton>
</NButtonGroup>
</NSpace>
<div class={styles.right}>
<div class={styles['form-box']}>
<div class={styles.list}>
<NButton onClick={handleConditions}>
<NIcon>
<SearchOutlined />
</NIcon>
</NButton>
</div>
<div class={styles.list}>
<NInput
placeholder={t('resource.file.enter_keyword_tips')}
v-model={[this.serachRef, 'value']}
/>
</div>
</div>
</div>
</div>
</Card>
<Card title={t('resource.file.file_manage')}>
<NDataTable
remote
columns={columnsRef}
data={this.resourceListRef?.value.table}
striped
size={'small'}
class={styles['table-box']}
row-class-name='items'
/>
<div class={styles.pagination}>
<NPagination
v-model:page={this.pagination.page}
v-model:pageSize={this.pagination.pageSize}
pageSizes={this.pagination.pageSizes}
item-count={this.pagination.itemCount}
onUpdatePage={this.handleUpdatePage}
onUpdatePageSize={this.handleUpdatePageSize}
show-quick-jumper
show-size-picker
/>
</div>
<ResourceFolderModal
v-model:show={this.folderShowRef}
onUpdateList={this.updateList}
/>
<ResourceUploadModal
v-model:show={this.uploadShowRef}
onUpdateList={this.updateList}
/>
<ResourceRenameModal
v-model:show={this.renameShowRef}
id={this.renameInfo.id}
name={this.renameInfo.name}
description={this.renameInfo.description}
onUpdateList={this.updateList}
/>
</Card>
</div>
)
}
})
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,877 | [Bug] [UI Next][V1.0.0-Alpha] The dag map displays error when I changed the pre tasks. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
When I changed the pre tasks in task editor model, the dag map displays error.
### What you expected to happen
The dag map displays correct as old ui page.
<img width="711" alt="image" src="https://user-images.githubusercontent.com/97265214/158150470-364e99fd-f42d-4b89-9d27-dbbef65c9b09.png">
### How to reproduce
1. Open the workflow definitions page.
2. Edit a task node.
3. Change the pre task, and then save.
<img width="671" alt="image" src="https://user-images.githubusercontent.com/97265214/158150381-449d8aee-929f-4b54-99ca-e864adcb7ba2.png">
<img width="626" alt="image" src="https://user-images.githubusercontent.com/97265214/158150569-cf936097-1e51-493b-b92b-080153645336.png">
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8877 | https://github.com/apache/dolphinscheduler/pull/8895 | 14bb722f6f8d8a4071ebe59f0ffa50df5d782f35 | 8bcbe2a1f466e588a119de32dfb3935af7614a35 | "2022-03-14T10:06:57Z" | java | "2022-03-15T06:55:19Z" | dolphinscheduler-ui-next/src/views/projects/workflow/components/dag/use-cell-update.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import type { Ref } from 'vue'
import type { Graph } from '@antv/x6'
import type { TaskType } from '@/views/projects/task/constants/task-type'
import type { Coordinate } from './types'
import { TASK_TYPES_MAP } from '@/views/projects/task/constants/task-type'
import { useCustomCellBuilder } from './dag-hooks'
import utils from '@/utils'
interface Options {
graph: Ref<Graph | undefined>
}
/**
* Expose some cell query
* @param {Options} options
*/
export function useCellUpdate(options: Options) {
const { graph } = options
const { buildNode } = useCustomCellBuilder()
/**
* Set node name by id
* @param {string} id
* @param {string} name
*/
function setNodeName(id: string, newName: string) {
const node = graph.value?.getCellById(id)
if (node) {
const truncation = utils.truncateText(newName, 18)
node.attr('title/text', truncation)
node.setData({ taskName: newName })
}
}
/**
* Add a node to the graph
* @param {string} id
* @param {string} taskType
* @param {Coordinate} coordinate Default is { x: 100, y: 100 }
*/
function addNode(
id: string,
type: string,
name: string,
flag: string,
coordinate: Coordinate = { x: 100, y: 100 }
) {
if (!TASK_TYPES_MAP[type as TaskType]) {
return
}
const node = buildNode(id, type, name, flag, coordinate)
graph.value?.addNode(node)
}
return {
setNodeName,
addNode
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,877 | [Bug] [UI Next][V1.0.0-Alpha] The dag map displays error when I changed the pre tasks. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
When I changed the pre tasks in task editor model, the dag map displays error.
### What you expected to happen
The dag map displays correct as old ui page.
<img width="711" alt="image" src="https://user-images.githubusercontent.com/97265214/158150470-364e99fd-f42d-4b89-9d27-dbbef65c9b09.png">
### How to reproduce
1. Open the workflow definitions page.
2. Edit a task node.
3. Change the pre task, and then save.
<img width="671" alt="image" src="https://user-images.githubusercontent.com/97265214/158150381-449d8aee-929f-4b54-99ca-e864adcb7ba2.png">
<img width="626" alt="image" src="https://user-images.githubusercontent.com/97265214/158150569-cf936097-1e51-493b-b92b-080153645336.png">
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8877 | https://github.com/apache/dolphinscheduler/pull/8895 | 14bb722f6f8d8a4071ebe59f0ffa50df5d782f35 | 8bcbe2a1f466e588a119de32dfb3935af7614a35 | "2022-03-14T10:06:57Z" | java | "2022-03-15T06:55:19Z" | dolphinscheduler-ui-next/src/views/projects/workflow/components/dag/use-task-edit.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { ref, onMounted, watch } from 'vue'
import type { Ref } from 'vue'
import type { Graph } from '@antv/x6'
import type { Coordinate, NodeData } from './types'
import { TaskType } from '@/views/projects/task/constants/task-type'
import { formatParams } from '@/views/projects/task/components/node/format-data'
import { useCellUpdate } from './dag-hooks'
import { WorkflowDefinition } from './types'
interface Options {
graph: Ref<Graph | undefined>
definition: Ref<WorkflowDefinition | undefined>
}
/**
* Edit task configuration when dbclick
* @param {Options} options
* @returns
*/
export function useTaskEdit(options: Options) {
const { graph, definition } = options
const { addNode, setNodeName } = useCellUpdate({ graph })
const taskDefinitions = ref<NodeData[]>(
definition.value?.taskDefinitionList || []
)
const currTask = ref<NodeData>({
taskType: 'SHELL',
code: 0,
name: ''
})
const taskModalVisible = ref(false)
/**
* Append a new task
*/
function appendTask(code: number, type: TaskType, coordinate: Coordinate) {
addNode(code + '', type, '', 'YES', coordinate)
taskDefinitions.value.push({
code,
taskType: type,
name: ''
})
openTaskModal({ code, taskType: type, name: '' })
}
/**
* Copy a task
*/
function copyTask(
name: string,
code: number,
targetCode: number,
type: TaskType,
flag: string,
coordinate: Coordinate
) {
addNode(code + '', type, name, flag, coordinate)
const definition = taskDefinitions.value.find((t) => t.code === targetCode)
const newDefinition = {
...definition,
code,
name
} as NodeData
taskDefinitions.value.push(newDefinition)
}
/**
* Remove task
* @param {number} code
*/
function removeTasks(codes: number[]) {
taskDefinitions.value = taskDefinitions.value.filter(
(task) => !codes.includes(task.code)
)
}
function openTaskModal(task: NodeData) {
currTask.value = task
taskModalVisible.value = true
}
/**
* Edit task
* @param {number} code
*/
function editTask(code: number) {
const definition = taskDefinitions.value.find((t) => t.code === code)
if (definition) {
currTask.value = definition
}
taskModalVisible.value = true
}
/**
* The confirm event in task config modal
* @param formRef
* @param from
*/
function taskConfirm({ data }: any) {
const taskDef = formatParams(data).taskDefinitionJsonObj as NodeData
// override target config
taskDefinitions.value = taskDefinitions.value.map((task) => {
if (task.code === currTask.value?.code) {
setNodeName(task.code + '', taskDef.name)
return {
...taskDef,
version: task.version,
code: task.code,
taskType: currTask.value.taskType
}
}
return task
})
taskModalVisible.value = false
}
/**
* The cancel event in task config modal
*/
function taskCancel() {
taskModalVisible.value = false
}
onMounted(() => {
if (graph.value) {
graph.value.on('cell:dblclick', ({ cell }) => {
const code = Number(cell.id)
editTask(code)
})
}
})
watch(definition, () => {
taskDefinitions.value = definition.value?.taskDefinitionList || []
})
return {
currTask,
taskModalVisible,
taskConfirm,
taskCancel,
appendTask,
editTask,
copyTask,
taskDefinitions,
removeTasks
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,810 | [Bug] [Master] Process Instance state does not change correctly and may cause Memory Leak | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
In `EventExecuteService`class, If an exception is throw in the `onSuccess` method, then `eventHandlerMap` (the dev branch is `multiThreadFilterMap`) cannot remove the thread of `WorkflowExecuteThread` normally, which has two effects: 1)The `WorkflowExecuteThread` will not handle any events, so processInstance state does not change; 2. There is a risk of memory leak.
Also, I found that exceptions in the `onSuccess` method do not print out logs, making it difficult to locate the problem
![image](https://user-images.githubusercontent.com/29919212/157623159-30f6aa2b-0864-4e08-90d6-5a01246984c5.png)
>
### What you expected to happen
The state of `ProcessInstance`changes normally
### How to reproduce
In the `onSuccess` method throw an exception, can reproduce.
Note:
1. You need to print `eventHandlerMap`and will find that `WorkflowExecuteThread` is always held and will not be removed
2. You won't see any exception messages in the master log
### Anything else
_No response_
### Version
2.0.1
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8810 | https://github.com/apache/dolphinscheduler/pull/8896 | a1bef43981b6a8f0f570dcb54551232696f9449c | 3114c917484e50aa241d20b9593e98625f2680fb | "2022-03-10T09:02:44Z" | java | "2022-03-15T08:34:19Z" | dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteThreadPool.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.master.runner;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.StateEvent;
import org.apache.dolphinscheduler.common.enums.StateEventType;
import org.apache.dolphinscheduler.common.utils.NetUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.plugin.task.api.enums.ExecutionStatus;
import org.apache.dolphinscheduler.remote.command.StateEventChangeCommand;
import org.apache.dolphinscheduler.remote.processor.StateEventCallbackService;
import org.apache.dolphinscheduler.server.master.cache.ProcessInstanceExecCacheManager;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.commons.lang.StringUtils;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import javax.annotation.PostConstruct;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.stereotype.Component;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.util.concurrent.ListenableFutureCallback;
@Component
public class WorkflowExecuteThreadPool extends ThreadPoolTaskExecutor {
private static final Logger logger = LoggerFactory.getLogger(WorkflowExecuteThreadPool.class);
@Autowired
private MasterConfig masterConfig;
@Autowired
private ProcessService processService;
@Autowired
private ProcessInstanceExecCacheManager processInstanceExecCacheManager;
@Autowired
private StateEventCallbackService stateEventCallbackService;
@Autowired
private StateWheelExecuteThread stateWheelExecuteThread;
/**
* multi-thread filter, avoid handling workflow at the same time
*/
private ConcurrentHashMap<String, WorkflowExecuteThread> multiThreadFilterMap = new ConcurrentHashMap();
@PostConstruct
private void init() {
this.setDaemon(true);
this.setThreadNamePrefix("Workflow-Execute-Thread-");
this.setMaxPoolSize(masterConfig.getExecThreads());
this.setCorePoolSize(masterConfig.getExecThreads());
}
/**
* submit state event
*/
public void submitStateEvent(StateEvent stateEvent) {
WorkflowExecuteThread workflowExecuteThread = processInstanceExecCacheManager.getByProcessInstanceId(stateEvent.getProcessInstanceId());
if (workflowExecuteThread == null) {
logger.error("workflowExecuteThread is null, processInstanceId:{}", stateEvent.getProcessInstanceId());
return;
}
workflowExecuteThread.addStateEvent(stateEvent);
}
/**
* start workflow
*/
public void startWorkflow(WorkflowExecuteThread workflowExecuteThread) {
submit(workflowExecuteThread::startProcess);
}
/**
* execute workflow
*/
public void executeEvent(WorkflowExecuteThread workflowExecuteThread) {
if (!workflowExecuteThread.isStart() || workflowExecuteThread.eventSize() == 0) {
return;
}
if (multiThreadFilterMap.containsKey(workflowExecuteThread.getKey())) {
return;
}
int processInstanceId = workflowExecuteThread.getProcessInstance().getId();
ListenableFuture future = this.submitListenable(() -> {
workflowExecuteThread.handleEvents();
multiThreadFilterMap.put(workflowExecuteThread.getKey(), workflowExecuteThread);
});
future.addCallback(new ListenableFutureCallback() {
@Override
public void onFailure(Throwable ex) {
logger.error("handle events {} failed", processInstanceId, ex);
multiThreadFilterMap.remove(workflowExecuteThread.getKey());
}
@Override
public void onSuccess(Object result) {
if (workflowExecuteThread.workFlowFinish()) {
stateWheelExecuteThread.removeProcess4TimeoutCheck(workflowExecuteThread.getProcessInstance());
processInstanceExecCacheManager.removeByProcessInstanceId(processInstanceId);
notifyProcessChanged(workflowExecuteThread.getProcessInstance());
logger.info("process instance {} finished.", processInstanceId);
}
multiThreadFilterMap.remove(workflowExecuteThread.getKey());
}
});
}
/**
* notify process change
*/
private void notifyProcessChanged(ProcessInstance finishProcessInstance) {
if (Flag.NO == finishProcessInstance.getIsSubProcess()) {
return;
}
Map<ProcessInstance, TaskInstance> fatherMaps = processService.notifyProcessList(finishProcessInstance.getId());
for (ProcessInstance processInstance : fatherMaps.keySet()) {
String address = NetUtils.getAddr(masterConfig.getListenPort());
if (processInstance.getHost().equalsIgnoreCase(address)) {
this.notifyMyself(processInstance, fatherMaps.get(processInstance));
} else {
this.notifyProcess(finishProcessInstance, processInstance, fatherMaps.get(processInstance));
}
}
}
/**
* notify myself
*/
private void notifyMyself(ProcessInstance processInstance, TaskInstance taskInstance) {
logger.info("notify process {} task {} state change", processInstance.getId(), taskInstance.getId());
if (!processInstanceExecCacheManager.contains(processInstance.getId())) {
return;
}
StateEvent stateEvent = new StateEvent();
stateEvent.setTaskInstanceId(taskInstance.getId());
stateEvent.setType(StateEventType.TASK_STATE_CHANGE);
stateEvent.setProcessInstanceId(processInstance.getId());
stateEvent.setExecutionStatus(ExecutionStatus.RUNNING_EXECUTION);
this.submitStateEvent(stateEvent);
}
/**
* notify process's master
*/
private void notifyProcess(ProcessInstance finishProcessInstance, ProcessInstance processInstance, TaskInstance taskInstance) {
String host = processInstance.getHost();
if (StringUtils.isEmpty(host)) {
logger.error("process {} host is empty, cannot notify task {} now", processInstance.getId(), taskInstance.getId());
return;
}
String address = host.split(":")[0];
int port = Integer.parseInt(host.split(":")[1]);
StateEventChangeCommand stateEventChangeCommand = new StateEventChangeCommand(
finishProcessInstance.getId(), 0, finishProcessInstance.getState(), processInstance.getId(), taskInstance.getId()
);
stateEventCallbackService.sendResult(address, port, stateEventChangeCommand.convert2Command());
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,875 | [Bug] [UI Next][V1.0.0-Alpha] sqoop task node missing some type when type is export | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![image](https://user-images.githubusercontent.com/17822915/158135197-50850459-e0d9-40e5-8a7a-2fd8664a1b85.png)
### What you expected to happen
above.
### How to reproduce
above.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8875 | https://github.com/apache/dolphinscheduler/pull/8898 | 3114c917484e50aa241d20b9593e98625f2680fb | 8402367d826ae742dedc2c6cfbe51fbab57a1fcf | "2022-03-14T08:39:30Z" | java | "2022-03-15T09:09:58Z" | dolphinscheduler-ui-next/src/views/projects/task/components/node/fields/use-sqoop-source-type.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { ref, h, watch, computed, unref } from 'vue'
import { useI18n } from 'vue-i18n'
import { useDatasource } from './use-sqoop-datasource'
import { useCustomParams } from '.'
import styles from '../index.module.scss'
import type { IJsonItem, IOption, ModelType } from '../types'
export function useSourceType(model: { [field: string]: any }): IJsonItem[] {
const { t } = useI18n()
const unCustomSpan = computed(() => (model.isCustomTask ? 0 : 24))
const tableSpan = computed(() =>
model.sourceType === 'MYSQL' && model.srcQueryType === '0' ? 24 : 0
)
const editorSpan = computed(() =>
model.sourceType === 'MYSQL' && model.srcQueryType === '1' ? 24 : 0
)
const columnSpan = computed(() =>
model.sourceType === 'MYSQL' && model.srcColumnType === '1' ? 24 : 0
)
const mysqlSpan = computed(() => (model.sourceType === 'MYSQL' ? 24 : 0))
const hiveSpan = computed(() => (model.sourceType === 'HIVE' ? 24 : 0))
const hdfsSpan = computed(() => (model.sourceType === 'HDFS' ? 24 : 0))
const datasourceSpan = computed(() => (model.sourceType === 'MYSQL' ? 12 : 0))
const sourceTypes = ref([
{
label: 'MYSQL',
value: 'MYSQL'
}
] as IOption[])
const getSourceTypesByModelType = (modelType: ModelType): IOption[] => {
switch (modelType) {
case 'import':
return [
{
label: 'MYSQL',
value: 'MYSQL'
}
]
case 'export':
return [
{
label: 'HDFS',
value: 'HDFS'
},
{
label: 'HIVE',
value: 'HIVE'
}
]
default:
return [
{
label: 'MYSQL',
value: 'MYSQL'
},
{
label: 'HDFS',
value: 'HDFS'
},
{
label: 'HIVE',
value: 'HIVE'
}
]
}
}
watch(
() => model.modelType,
(modelType: ModelType) => {
getSourceTypesByModelType(modelType)
}
)
return [
{
type: 'custom',
field: 'custom-title',
span: unCustomSpan,
widget: h(
'div',
{ class: styles['field-title'] },
t('project.node.data_source')
)
},
{
type: 'select',
field: 'sourceType',
name: t('project.node.type'),
span: unCustomSpan,
options: sourceTypes
},
...useDatasource(
model,
datasourceSpan,
'sourceMysqlType',
'sourceMysqlDatasource'
),
{
type: 'radio',
field: 'srcQueryType',
name: t('project.node.model_type'),
span: mysqlSpan,
options: [
{
label: t('project.node.form'),
value: '0'
},
{
label: 'SQL',
value: '1'
}
]
},
{
type: 'input',
field: 'srcTable',
name: t('project.node.table'),
span: tableSpan,
props: {
placeholder: t('project.node.table_tips')
},
validate: {
trigger: ['input', 'blur'],
required: !!unref(tableSpan),
validator(validate, value) {
if (!!unref(tableSpan) && !value) {
return new Error(t('project.node.table_tips'))
}
}
}
},
{
type: 'radio',
field: 'srcColumnType',
name: t('project.node.column_type'),
span: tableSpan,
options: [
{ label: t('project.node.all_columns'), value: '0' },
{ label: t('project.node.some_columns'), value: '1' }
]
},
{
type: 'input',
field: 'srcColumns',
name: t('project.node.column'),
span: columnSpan,
props: {
placeholder: t('project.node.column_tips')
},
validate: {
trigger: ['input', 'blur'],
required: !!unref(columnSpan),
validator(validate, value) {
if (!!unref(columnSpan) && !value) {
return new Error(t('project.node.column_tips'))
}
}
}
},
{
type: 'input',
field: 'sourceHiveDatabase',
name: t('project.node.database'),
span: hiveSpan,
props: {
placeholder: t('project.node.database_tips')
},
validate: {
trigger: ['blur', 'input'],
required: !!unref(hiveSpan),
validator(validate, value) {
if (!!unref(hiveSpan) && !value) {
return new Error(t('project.node.database_tips'))
}
}
}
},
{
type: 'input',
field: 'sourceHiveTable',
name: t('project.node.table'),
span: hiveSpan,
props: {
placeholder: t('project.node.hive_table_tips')
},
validate: {
trigger: ['blur', 'input'],
required: !!unref(hiveSpan),
validator(validate, value) {
if (!!unref(hiveSpan) && !value) {
return new Error(t('project.node.hive_table_tips'))
}
}
}
},
{
type: 'input',
field: 'sourceHivePartitionKey',
name: t('project.node.hive_partition_keys'),
span: hiveSpan,
props: {
placeholder: t('project.node.hive_partition_keys_tips')
}
},
{
type: 'input',
field: 'sourceHivePartitionValue',
name: t('project.node.hive_partition_values'),
span: hiveSpan,
props: {
placeholder: t('project.node.hive_partition_values_tips')
}
},
{
type: 'input',
field: 'sourceHdfsExportDir',
name: t('project.node.export_dir'),
span: hdfsSpan,
props: {
placeholder: t('project.node.export_dir_tips')
},
validate: {
trigger: ['blur', 'input'],
required: !!unref(hdfsSpan),
validator(validate, value) {
if (!!unref(hdfsSpan) && !value) {
return new Error(t('project.node.export_dir_tips'))
}
}
}
},
{
type: 'editor',
field: 'sourceMysqlSrcQuerySql',
name: t('project.node.sql_statement'),
span: editorSpan,
validate: {
trigger: ['blur', 'input'],
required: !!unref(editorSpan),
validator(validate, value) {
if (!!unref(editorSpan) && !value) {
return new Error(t('project.node.sql_statement_tips'))
}
}
}
},
...useCustomParams({
model,
field: 'mapColumnHive',
name: 'map_column_hive',
isSimple: true,
span: editorSpan
}),
...useCustomParams({
model,
field: 'mapColumnJava',
name: 'map_column_java',
isSimple: true,
span: editorSpan
})
]
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,875 | [Bug] [UI Next][V1.0.0-Alpha] sqoop task node missing some type when type is export | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![image](https://user-images.githubusercontent.com/17822915/158135197-50850459-e0d9-40e5-8a7a-2fd8664a1b85.png)
### What you expected to happen
above.
### How to reproduce
above.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8875 | https://github.com/apache/dolphinscheduler/pull/8898 | 3114c917484e50aa241d20b9593e98625f2680fb | 8402367d826ae742dedc2c6cfbe51fbab57a1fcf | "2022-03-14T08:39:30Z" | java | "2022-03-15T09:09:58Z" | dolphinscheduler-ui-next/src/views/projects/task/components/node/fields/use-sqoop-target-type.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { ref, h, watch, computed, unref } from 'vue'
import { useI18n } from 'vue-i18n'
import { useDatasource } from './use-sqoop-datasource'
import styles from '../index.module.scss'
import type { IJsonItem, IOption, SourceType } from '../types'
/**
 * Builds the "data target" form section of the sqoop task node.
 *
 * The returned items switch visibility (via span 0/24) based on
 * `model.targetType`, and the available target types depend on the
 * selected source type / query type.
 *
 * @param model reactive form model shared with the task-node form
 * @returns json form items for the target side of the sqoop job
 */
export function useTargetType(model: { [field: string]: any }): IJsonItem[] {
  const { t } = useI18n()
  // A span of 0 hides the field; 24 shows it full width, 12 half width.
  const unCustomSpan = computed(() => (model.isCustomTask ? 0 : 24))
  const hiveSpan = computed(() => (model.targetType === 'HIVE' ? 24 : 0))
  const hdfsSpan = computed(() => (model.targetType === 'HDFS' ? 24 : 0))
  const mysqlSpan = computed(() => (model.targetType === 'MYSQL' ? 24 : 0))
  const dataSourceSpan = computed(() => (model.targetType === 'MYSQL' ? 12 : 0))
  const updateSpan = computed(() =>
    model.targetType === 'MYSQL' && model.isUpdate ? 24 : 0
  )

  const targetTypes = ref([
    {
      label: 'HIVE',
      value: 'HIVE'
    },
    {
      label: 'HDFS',
      value: 'HDFS'
    }
  ] as IOption[])

  // Maps the chosen source type (and, for MYSQL, the query type) to the
  // set of valid target types: import (MYSQL source) goes to HDFS/HIVE,
  // export (HDFS/HIVE source) goes to MYSQL.
  const getTargetTypesBySourceType = (
    sourceType: SourceType,
    srcQueryType: string
  ): IOption[] => {
    switch (sourceType) {
      case 'MYSQL':
        if (srcQueryType === '1') {
          return [
            {
              label: 'HDFS',
              value: 'HDFS'
            }
          ]
        }
        return [
          {
            label: 'HDFS',
            value: 'HDFS'
          },
          {
            label: 'HIVE',
            value: 'HIVE'
          }
        ]
      case 'HDFS':
      case 'HIVE':
        return [
          {
            label: 'MYSQL',
            value: 'MYSQL'
          }
        ]
      default:
        return [
          {
            label: 'HDFS',
            value: 'HDFS'
          },
          {
            label: 'HIVE',
            value: 'HIVE'
          }
        ]
    }
  }

  watch(
    () => [model.sourceType, model.srcQueryType],
    ([sourceType, srcQueryType]) => {
      // BUG FIX: the computed options were previously discarded, so the
      // target-type select never offered MYSQL when the job is an export
      // (HDFS/HIVE source). Assign them so the select stays in sync.
      targetTypes.value = getTargetTypesBySourceType(sourceType, srcQueryType)
    }
  )

  return [
    {
      type: 'custom',
      field: 'custom-title',
      span: unCustomSpan,
      widget: h(
        'div',
        { class: styles['field-title'] },
        t('project.node.data_target')
      )
    },
    {
      type: 'select',
      field: 'targetType',
      name: t('project.node.type'),
      span: unCustomSpan,
      options: targetTypes
    },
    {
      type: 'input',
      field: 'targetHiveDatabase',
      name: t('project.node.database'),
      span: hiveSpan,
      props: {
        placeholder: t('project.node.database_tips')
      },
      validate: {
        trigger: ['blur', 'input'],
        required: !!unref(hiveSpan),
        validator(validate, value) {
          if (!!unref(hiveSpan) && !value) {
            return new Error(t('project.node.database_tips'))
          }
        }
      }
    },
    {
      type: 'input',
      field: 'targetHiveTable',
      // Fixed copy-paste mistake: this is the table field, not the database.
      name: t('project.node.table'),
      span: hiveSpan,
      props: {
        placeholder: t('project.node.hive_table_tips')
      },
      validate: {
        trigger: ['blur', 'input'],
        required: !!unref(hiveSpan),
        validator(rule, value) {
          if (!!unref(hiveSpan) && !value) {
            return new Error(t('project.node.hive_table_tips'))
          }
        }
      }
    },
    {
      type: 'switch',
      field: 'targetHiveCreateTable',
      span: hiveSpan,
      name: t('project.node.create_hive_table')
    },
    {
      type: 'switch',
      field: 'targetHiveDropDelimiter',
      span: hiveSpan,
      name: t('project.node.drop_delimiter')
    },
    {
      type: 'switch',
      field: 'targetHiveOverWrite',
      span: hiveSpan,
      name: t('project.node.over_write_src')
    },
    {
      type: 'input',
      field: 'targetHiveTargetDir',
      name: t('project.node.hive_target_dir'),
      span: hiveSpan,
      props: {
        placeholder: t('project.node.hive_target_dir_tips')
      }
    },
    {
      type: 'input',
      field: 'targetHiveReplaceDelimiter',
      name: t('project.node.replace_delimiter'),
      span: hiveSpan,
      props: {
        placeholder: t('project.node.replace_delimiter_tips')
      }
    },
    {
      type: 'input',
      field: 'targetHivePartitionKey',
      name: t('project.node.hive_partition_keys'),
      span: hiveSpan,
      props: {
        placeholder: t('project.node.hive_partition_keys_tips')
      }
    },
    {
      type: 'input',
      field: 'targetHivePartitionValue',
      name: t('project.node.hive_partition_values'),
      span: hiveSpan,
      props: {
        placeholder: t('project.node.hive_partition_values_tips')
      }
    },
    {
      type: 'input',
      field: 'targetHdfsTargetPath',
      name: t('project.node.target_dir'),
      span: hdfsSpan,
      props: {
        placeholder: t('project.node.target_dir_tips')
      },
      validate: {
        trigger: ['blur', 'input'],
        required: !!unref(hdfsSpan),
        validator(rule, value) {
          if (!!unref(hdfsSpan) && !value) {
            return new Error(t('project.node.target_dir_tips'))
          }
        }
      }
    },
    {
      type: 'switch',
      field: 'targetHdfsDeleteTargetDir',
      name: t('project.node.delete_target_dir'),
      span: hdfsSpan
    },
    {
      type: 'radio',
      field: 'targetHdfsCompressionCodec',
      name: t('project.node.compression_codec'),
      span: hdfsSpan,
      options: COMPRESSIONCODECS
    },
    {
      type: 'radio',
      field: 'targetHdfsFileType',
      name: t('project.node.file_type'),
      span: hdfsSpan,
      options: FILETYPES
    },
    {
      type: 'input',
      field: 'targetHdfsFieldsTerminated',
      name: t('project.node.fields_terminated'),
      span: hdfsSpan,
      props: {
        placeholder: t('project.node.fields_terminated_tips')
      }
    },
    {
      type: 'input',
      field: 'targetHdfsLinesTerminated',
      name: t('project.node.lines_terminated'),
      span: hdfsSpan,
      props: {
        placeholder: t('project.node.lines_terminated_tips')
      }
    },
    ...useDatasource(
      model,
      dataSourceSpan,
      'targetMysqlType',
      'targetMysqlDatasource'
    ),
    {
      type: 'input',
      field: 'targetMysqlTable',
      name: t('project.node.table'),
      span: mysqlSpan,
      props: {
        // Use the generic table tip here; the hive-specific tip belongs to
        // the HIVE target fields above.
        placeholder: t('project.node.table_tips')
      },
      validate: {
        trigger: ['blur', 'input'],
        required: !!unref(mysqlSpan),
        validator(validate, value) {
          if (!!unref(mysqlSpan) && !value) {
            return new Error(t('project.node.table_tips'))
          }
        }
      }
    },
    {
      type: 'input',
      field: 'targetMysqlColumns',
      name: t('project.node.column'),
      span: mysqlSpan,
      props: {
        placeholder: t('project.node.column_tips')
      }
    },
    {
      type: 'input',
      field: 'targetMysqlFieldsTerminated',
      name: t('project.node.fields_terminated'),
      span: mysqlSpan,
      props: {
        placeholder: t('project.node.fields_terminated_tips')
      }
    },
    {
      type: 'input',
      field: 'targetMysqlLinesTerminated',
      name: t('project.node.lines_terminated'),
      span: mysqlSpan,
      props: {
        placeholder: t('project.node.lines_terminated_tips')
      }
    },
    {
      type: 'switch',
      field: 'targetMysqlIsUpdate',
      span: mysqlSpan,
      name: t('project.node.is_update')
    },
    {
      type: 'input',
      field: 'targetMysqlTargetUpdateKey',
      name: t('project.node.update_key'),
      span: updateSpan,
      props: {
        placeholder: t('project.node.update_key_tips')
      }
    },
    {
      type: 'radio',
      field: 'targetMysqlUpdateMode',
      name: t('project.node.update_mode'),
      span: updateSpan,
      options: [
        {
          label: t('project.node.only_update'),
          value: 'updateonly'
        },
        {
          label: t('project.node.allow_insert'),
          value: 'allowinsert'
        }
      ]
    }
  ]
}
// Compression codec choices for the HDFS target; the 'no' option maps to an
// empty value so that no compression codec is applied.
const COMPRESSIONCODECS = [
  ...['snappy', 'lzo', 'gzip'].map((codec) => ({
    label: codec,
    value: codec
  })),
  {
    label: 'no',
    value: ''
  }
]
// Output file format choices for the HDFS target; each value is the exact
// sqoop CLI flag appended to the generated command (avro is the one format
// whose flag does not follow the `--as-<name>file` pattern).
const FILETYPES = ['avro', 'sequence', 'text', 'parquet'].map((format) => ({
  label: format,
  value: format === 'avro' ? '--as-avrodatafile' : `--as-${format}file`
}))
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,875 | [Bug] [UI Next][V1.0.0-Alpha] sqoop task node missing some type when type is export | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![image](https://user-images.githubusercontent.com/17822915/158135197-50850459-e0d9-40e5-8a7a-2fd8664a1b85.png)
### What you expected to happen
above.
### How to reproduce
above.
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8875 | https://github.com/apache/dolphinscheduler/pull/8898 | 3114c917484e50aa241d20b9593e98625f2680fb | 8402367d826ae742dedc2c6cfbe51fbab57a1fcf | "2022-03-14T08:39:30Z" | java | "2022-03-15T09:09:58Z" | dolphinscheduler-ui-next/src/views/projects/task/components/node/fields/use-sqoop.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { watch, computed, unref } from 'vue'
import { useI18n } from 'vue-i18n'
import { useCustomParams, useSourceType, useTargetType } from '.'
import type { IJsonItem, ModelType } from '../types'
/**
 * Builds the full sqoop task-node form: job meta fields, the source and
 * target sections, and the custom-script editor used when the "custom job"
 * switch is on.
 *
 * @param model reactive form model shared with the task-node form
 * @returns json form items for the sqoop task node
 */
export function useSqoop(model: { [field: string]: any }): IJsonItem[] {
  const { t } = useI18n()
  // Custom jobs show only the script editor; non-custom jobs show the
  // structured source/target fields (span 0 hides a field).
  const customSpan = computed(() => (model.isCustomTask ? 24 : 0))
  const unCustomSpan = computed(() => (model.isCustomTask ? 0 : 24))

  return [
    {
      type: 'switch',
      field: 'isCustomTask',
      name: t('project.node.custom_job')
    },
    {
      type: 'input',
      field: 'jobName',
      name: t('project.node.sqoop_job_name'),
      span: unCustomSpan,
      props: {
        placeholder: t('project.node.sqoop_job_name_tips')
      },
      validate: {
        trigger: ['input', 'blur'],
        required: !model.isCustomTask,
        validator(validate, value) {
          if (!model.isCustomTask && !value) {
            return new Error(t('project.node.sqoop_job_name_tips'))
          }
        }
      }
    },
    {
      type: 'select',
      field: 'modelType',
      name: t('project.node.direct'),
      span: unCustomSpan,
      options: MODEL_TYPES
    },
    ...useCustomParams({
      model,
      field: 'hadoopCustomParams',
      name: 'hadoop_custom_params',
      isSimple: true,
      span: unCustomSpan
    }),
    ...useCustomParams({
      model,
      field: 'sqoopAdvancedParams',
      name: 'sqoop_advanced_parameters',
      isSimple: true,
      span: unCustomSpan
    }),
    ...useSourceType(model),
    ...useTargetType(model),
    {
      type: 'input-number',
      field: 'concurrency',
      name: t('project.node.concurrency'),
      span: unCustomSpan,
      props: {
        placeholder: t('project.node.concurrency_tips')
      }
    },
    {
      type: 'editor',
      field: 'customShell',
      name: t('project.node.custom_script'),
      span: customSpan,
      validate: {
        // Fixed invalid trigger name: 'trigger' is not a validation event.
        trigger: ['input', 'blur'],
        required: !!unref(customSpan),
        validator(rule, value) {
          if (!!unref(customSpan) && !value) {
            return new Error(t('project.node.custom_script'))
          }
        }
      }
    },
    ...useCustomParams({
      model,
      field: 'localParams',
      name: 'custom_parameters',
      isSimple: true
    })
  ]
}
// Sqoop job direction options; the value is forwarded verbatim to the
// backend task definition.
const MODEL_TYPES: { label: ModelType; value: ModelType }[] = (
  ['import', 'export'] as ModelType[]
).map((direction) => ({
  label: direction,
  value: direction
}))
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,726 | [Bug][UI Next][V1.0.0-Alpha] Workflow timing input box cannot be displayed normally. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![image](https://user-images.githubusercontent.com/8847400/156962299-033fa60a-7cf7-484e-9f05-ea75710b28c7.png)
### What you expected to happen
week select and business input normal display
### How to reproduce
Open timing for workflow
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8726 | https://github.com/apache/dolphinscheduler/pull/8902 | 8402367d826ae742dedc2c6cfbe51fbab57a1fcf | f4b3ed263cadc696ae080c68d1b734a096afabed | "2022-03-07T03:27:36Z" | java | "2022-03-15T09:11:12Z" | dolphinscheduler-ui-next/src/components/crontab/index.module.scss | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// One crontab radio option per row; label fragments and inputs are laid out
// horizontally and vertically centred.
.crontab-list {
  display: flex;
  .crontab-list-item {
    display: flex;
    vertical-align: middle;
    align-items: center;
    // NOTE(review): the fixed 460px row width may be too small for the week
    // select / day inputs in some locales — confirm it fits all controls.
    width: 460px;
    > div {
      margin: 5px;
    }
  }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,726 | [Bug][UI Next][V1.0.0-Alpha] Workflow timing input box cannot be displayed normally. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![image](https://user-images.githubusercontent.com/8847400/156962299-033fa60a-7cf7-484e-9f05-ea75710b28c7.png)
### What you expected to happen
week select and business input normal display
### How to reproduce
Open timing for workflow
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8726 | https://github.com/apache/dolphinscheduler/pull/8902 | 8402367d826ae742dedc2c6cfbe51fbab57a1fcf | f4b3ed263cadc696ae080c68d1b734a096afabed | "2022-03-07T03:27:36Z" | java | "2022-03-15T09:11:12Z" | dolphinscheduler-ui-next/src/components/crontab/modules/day.tsx | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { defineComponent, onMounted, PropType, ref, watch } from 'vue'
import { NInputNumber, NRadio, NRadioGroup, NSelect } from 'naive-ui'
import { useI18n } from 'vue-i18n'
import { isStr, isWeek, week, specificWeek, lastWeeks } from '../common'
import styles from '../index.module.scss'
// v-model bindings from the parent crontab form: the raw day-of-month and
// day-of-week crontab expressions, kept as plain strings.
const props = {
  dayValue: {
    default: '*',
    type: String as PropType<string>
  },
  weekValue: {
    default: '?',
    type: String as PropType<string>
  }
}
export default defineComponent({
name: 'CrontabDay',
props,
emits: ['update:dayValue', 'update:weekValue'],
setup(props, ctx) {
const { t } = useI18n()
const options = Array.from({ length: 60 }, (x, i) => ({
label: i.toString(),
value: i
}))
const weekOptions = week.map((v) => ({
label: t(v.label),
value: v.value
}))
const lastWeekOptions = lastWeeks.map((v) => ({
label: t(v.label),
value: v.value
}))
const radioRef = ref()
const dayRef = ref()
const weekRef = ref()
const WkintervalWeekStartRef = ref(2)
const WkintervalWeekPerformRef = ref(2)
const intervalDayStartRef = ref(1)
const intervalDayPerformRef = ref(1)
const WkspecificDayRef = ref<Array<number>>([])
const WkspecificWeekRef = ref<Array<number>>([])
const monthLastDaysRef = ref('L')
const monthLastWorkingDaysRef = ref('LW')
const monthLastWeeksRef = ref('?')
const monthTailBeforeRef = ref(1)
const recentlyWorkingDaysMonthRef = ref(1)
const WkmonthNumWeeksDayRef = ref(1)
const WkmonthNumWeeksWeekRef = ref(1)
/**
* Parse parameter value
*/
const analyticalValue = () => {
const $dayVal = props.dayValue
const $weekVal = props.weekValue
const isWeek1 = $weekVal.indexOf('/') !== -1
const isWeek2 = $weekVal.indexOf('#') !== -1
// Initialization
if ($dayVal === '*' && $weekVal === '?') {
radioRef.value = 'everyDay'
return
}
// week
if (isWeek1 || isWeek2 || isWeek($weekVal)) {
dayRef.value = '?'
/**
* Processing by sequence number (excluding days)
* @param [
* WkintervalWeek=>(/),
* WkspecificWeek=>(TUE,WED),
* WkmonthNumWeeks=>(#)
* ]
*/
const hanleWeekOne = () => {
const a = isStr($weekVal, '/') as string[]
WkintervalWeekStartRef.value = parseInt(a[0])
WkintervalWeekPerformRef.value = parseInt(a[1])
dayRef.value = '?'
weekRef.value = `${WkintervalWeekPerformRef.value}/${WkintervalWeekStartRef.value}`
radioRef.value = 'WkintervalWeek'
}
const hanleWeekTwo = () => {
WkspecificWeekRef.value = $weekVal
.split(',')
.map((item) => parseInt(item))
radioRef.value = 'WkspecificWeek'
}
const hanleWeekThree = () => {
const a = isStr($weekVal, '#') as string[]
WkmonthNumWeeksDayRef.value = parseInt(a[0])
WkmonthNumWeeksDayRef.value = parseInt(a[1])
radioRef.value = 'WkmonthNumWeeks'
}
// Processing week
if (isStr($weekVal, '/')) {
hanleWeekOne()
} else if (isStr($weekVal, '#')) {
hanleWeekThree()
} else if (isWeek($weekVal)) {
hanleWeekTwo()
}
} else {
weekRef.value = '?'
/**
* Processing by sequence number (excluding week)
* @param [
* everyDay=>(*),
* intervalDay=>(1/1),
* specificDay=>(1,2,5,3,4),
* monthLastDays=>(L),
* monthLastWorkingDays=>(LW),
* monthLastWeeks=>(3L),
* monthTailBefore=>(L-4),
* recentlyWorkingDaysMonth=>(6W)
* ]
*/
const hanleDayOne = () => {
radioRef.value = 'everyDay'
}
const hanleDayTwo = () => {
const a = isStr($dayVal, '/') as string[]
intervalDayStartRef.value = parseInt(a[0])
intervalDayPerformRef.value = parseInt(a[1])
radioRef.value = 'intervalDay'
}
const hanleDayThree = () => {
WkspecificDayRef.value = $dayVal
.split(',')
.map((item) => parseInt(item))
radioRef.value = 'specificDay'
}
const hanleDayFour = () => {
radioRef.value = 'monthLastDays'
}
const hanleDayFive = () => {
radioRef.value = 'monthLastWorkingDays'
}
const hanleDaySix = () => {
monthLastWeeksRef.value = $dayVal
radioRef.value = 'monthLastWeeks'
}
const hanleDaySeven = () => {
const a = isStr($dayVal, '-') as string[]
monthTailBeforeRef.value = parseInt(a[1])
radioRef.value = 'monthTailBefore'
}
const hanleDayEight = () => {
recentlyWorkingDaysMonthRef.value = parseInt(
$dayVal.slice(0, $dayVal.length - 1)
)
radioRef.value = 'recentlyWorkingDaysMonth'
}
if ($dayVal === '*') {
hanleDayOne()
} else if (isStr($dayVal, '/')) {
hanleDayTwo()
} else if ($dayVal === 'L') {
hanleDayFour()
} else if ($dayVal === 'LW') {
hanleDayFive()
} else if ($dayVal.charAt($dayVal.length - 1) === 'L') {
hanleDaySix()
} else if (isStr($dayVal, '-')) {
hanleDaySeven()
} else if ($dayVal.charAt($dayVal.length - 1) === 'W') {
hanleDayEight()
} else {
hanleDayThree()
}
}
}
// Every few weeks
const onWkintervalWeekPerform = (value: number | null) => {
WkintervalWeekPerformRef.value = value || 0
if (radioRef.value === 'WkintervalWeek') {
dayRef.value = '?'
weekRef.value = `${WkintervalWeekStartRef.value}/${WkintervalWeekPerformRef.value}`
}
}
// Every few weeks
const onWkintervalWeekStart = (value: number | null) => {
WkintervalWeekStartRef.value = value || 0
if (radioRef.value === 'WkintervalWeek') {
dayRef.value = '?'
weekRef.value = `${WkintervalWeekStartRef.value}/${WkintervalWeekPerformRef.value}`
}
}
// Interval start time(1)
const onIntervalDayStart = (value: number | null) => {
intervalDayStartRef.value = value || 0
if (radioRef.value === 'intervalDay') {
intervalDaySet()
}
}
// Interval execution time(2)
const onIntervalDayPerform = (value: number | null) => {
intervalDayPerformRef.value = value || 0
if (radioRef.value === 'intervalDay') {
intervalDaySet()
}
}
// Specific day of the week (multiple choice)
const onWkspecificWeek = (arr: Array<number>) => {
WkspecificWeekRef.value = arr
if (radioRef.value === 'WkspecificWeek') {
dayRef.value = '?'
weekRef.value = arr.join(',')
}
}
// Specific days (multiple choices)
const onWkspecificDay = (arr: Array<number>) => {
WkspecificDayRef.value = arr
if (radioRef.value === 'specificDay') {
weekRef.value = '?'
dayRef.value = arr.join(',')
}
}
const onMonthLastWeeks = (value: string | null) => {
monthLastWeeksRef.value = value || '?'
if (radioRef.value === 'monthLastWeeks') {
weekRef.value = value
dayRef.value = '?'
}
}
// Specific days
const onSpecificDays = (arr: Array<number>) => {
WkspecificDayRef.value = arr
if (radioRef.value === 'specificDay') {
specificSet()
}
}
// By the end of this month
const onMonthTailBefore = (value: number | null) => {
monthTailBeforeRef.value = value || 0
if (radioRef.value === 'monthTailBefore') {
dayRef.value = `L-${monthTailBeforeRef.value}`
}
}
// Last working day
const onRecentlyWorkingDaysMonth = (value: number | null) => {
recentlyWorkingDaysMonthRef.value = value || 0
if (radioRef.value === 'recentlyWorkingDaysMonth') {
dayRef.value = `${recentlyWorkingDaysMonthRef.value}W`
}
}
// On the day of this month
const onWkmonthNumWeeksDay = (value: number | null) => {
WkmonthNumWeeksDayRef.value = value || 0
if (radioRef.value === 'WkmonthNumWeeks') {
weekRef.value = `${WkmonthNumWeeksWeekRef.value}#${WkmonthNumWeeksDayRef.value}`
}
}
// On the week of this month
const onWkmonthNumWeeksWeek = (value: number | null) => {
if (radioRef.value === 'WkmonthNumWeeks') {
dayRef.value = '?'
weekRef.value = `${value}#${WkmonthNumWeeksDayRef.value}`
}
}
// Reset every day
const everyDaySet = () => {
dayRef.value = '*'
}
// Reset interval week starts from *
const WkintervalWeekReset = () => {
weekRef.value = `${WkintervalWeekStartRef.value}/${WkintervalWeekPerformRef.value}`
}
// Reset interval days
const intervalDaySet = () => {
dayRef.value = `${intervalDayStartRef.value}/${intervalDayPerformRef.value}`
}
// Specific week (multiple choices)
const WkspecificWeekReset = () => {
weekRef.value = WkspecificWeekRef.value.length
? WkspecificWeekRef.value.join(',')
: '*'
}
// Reset specific days
const specificSet = () => {
if (WkspecificDayRef.value.length) {
dayRef.value = WkspecificDayRef.value.join(',')
} else {
dayRef.value = '*'
}
}
// On the last day of the month
const monthLastDaysReset = () => {
dayRef.value = monthLastDaysRef.value
}
// On the last working day of the month
const monthLastWorkingDaysReset = () => {
dayRef.value = monthLastWorkingDaysRef.value
}
// At the end of the month*
const monthLastWeeksReset = () => {
dayRef.value = monthLastWeeksRef.value
}
// By the end of this month
const monthTailBeforeReset = () => {
dayRef.value = `L-${monthTailBeforeRef.value}`
}
// Last working day (Monday to Friday) to this month
const recentlyWorkingDaysMonthReset = () => {
dayRef.value = `${recentlyWorkingDaysMonthRef.value}W`
}
// On the day of this month
const WkmonthNumReset = () => {
weekRef.value = `${WkmonthNumWeeksWeekRef.value}#${WkmonthNumWeeksDayRef.value}`
}
const updateRadioDay = (value: string) => {
switch (value) {
case 'everyDay':
weekRef.value = '?'
everyDaySet()
break
case 'WkintervalWeek':
dayRef.value = '?'
WkintervalWeekReset()
break
case 'intervalDay':
weekRef.value = '?'
intervalDaySet()
break
case 'WkspecificWeek':
dayRef.value = '?'
WkspecificWeekReset()
break
case 'specificDay':
weekRef.value = '?'
specificSet()
break
case 'monthLastDays':
weekRef.value = '?'
monthLastDaysReset()
break
case 'monthLastWorkingDays':
weekRef.value = '?'
monthLastWorkingDaysReset()
break
case 'monthLastWeeks':
weekRef.value = '1L'
monthLastWeeksReset()
break
case 'monthTailBefore':
weekRef.value = '?'
monthTailBeforeReset()
break
case 'recentlyWorkingDaysMonth':
weekRef.value = '?'
recentlyWorkingDaysMonthReset()
break
case 'WkmonthNumWeeks':
dayRef.value = '?'
WkmonthNumReset()
break
}
}
watch(
() => dayRef.value,
() => ctx.emit('update:dayValue', dayRef.value.toString())
)
watch(
() => weekRef.value,
() => ctx.emit('update:weekValue', weekRef.value.toString())
)
onMounted(() => analyticalValue())
return {
options,
weekOptions,
lastWeekOptions,
radioRef,
WkintervalWeekStartRef,
WkintervalWeekPerformRef,
intervalDayStartRef,
intervalDayPerformRef,
WkspecificWeekRef,
WkspecificDayRef,
monthLastWeeksRef,
monthTailBeforeRef,
recentlyWorkingDaysMonthRef,
WkmonthNumWeeksDayRef,
WkmonthNumWeeksWeekRef,
updateRadioDay,
onWkintervalWeekStart,
onWkintervalWeekPerform,
onIntervalDayStart,
onIntervalDayPerform,
onSpecificDays,
onWkspecificWeek,
onWkspecificDay,
onMonthLastWeeks,
onMonthTailBefore,
onRecentlyWorkingDaysMonth,
onWkmonthNumWeeksDay,
onWkmonthNumWeeksWeek
}
},
render() {
const { t } = useI18n()
return (
<NRadioGroup
v-model:value={this.radioRef}
onUpdateValue={this.updateRadioDay}
>
<NRadio class={styles['crontab-list']} value={'everyDay'}>
<div class={styles['crontab-list-item']}>
<div>{t('crontab.every_day')}</div>
</div>
</NRadio>
<NRadio class={styles['crontab-list']} value={'WkintervalWeek'}>
<div class={styles['crontab-list-item']}>
<div>{t('crontab.every')}</div>
<div>
<NInputNumber
defaultValue={0}
min={0}
max={7}
v-model:value={this.WkintervalWeekPerformRef}
onUpdateValue={this.onWkintervalWeekPerform}
/>
</div>
<div>{t('crontab.day_carried_out')}</div>
<div>
<NSelect
style={{ width: '150px' }}
options={this.weekOptions}
defaultValue={this.WkintervalWeekStartRef}
v-model:value={this.WkintervalWeekStartRef}
onUpdateValue={this.onWkintervalWeekStart}
/>
</div>
<div>{t('crontab.start')}</div>
</div>
</NRadio>
<NRadio class={styles['crontab-list']} value={'intervalDay'}>
<div class={styles['crontab-list-item']}>
<div>{t('crontab.every')}</div>
<div>
<NInputNumber
defaultValue={0}
min={0}
max={31}
v-model:value={this.intervalDayPerformRef}
onUpdateValue={this.onIntervalDayPerform}
/>
</div>
<div>{t('crontab.day_carried_out')}</div>
<div>
<NInputNumber
defaultValue={0}
min={1}
max={31}
v-model:value={this.intervalDayStartRef}
onUpdateValue={this.onIntervalDayStart}
/>
</div>
<div>{t('crontab.day_start')}</div>
</div>
</NRadio>
<NRadio class={styles['crontab-list']} value={'WkspecificWeek'}>
<div class={styles['crontab-list-item']}>
<div>{t('crontab.specific_week')}</div>
<div>
<NSelect
style={{ width: '300px' }}
multiple
options={specificWeek}
placeholder={t('crontab.specific_week_tip')}
v-model:value={this.WkspecificWeekRef}
onUpdateValue={this.onWkspecificWeek}
/>
</div>
</div>
</NRadio>
<NRadio class={styles['crontab-list']} value={'specificDay'}>
<div class={styles['crontab-list-item']}>
<div>{t('crontab.specific_day')}</div>
<div>
<NSelect
style={{ width: '300px' }}
multiple
options={this.options}
placeholder={t('crontab.specific_day_tip')}
v-model:value={this.WkspecificDayRef}
onUpdateValue={this.onWkspecificDay}
/>
</div>
</div>
</NRadio>
<NRadio class={styles['crontab-list']} value={'monthLastDays'}>
<div class={styles['crontab-list-item']}>
<div>{t('crontab.last_day_of_month')}</div>
</div>
</NRadio>
<NRadio class={styles['crontab-list']} value={'monthLastWorkingDays'}>
<div class={styles['crontab-list-item']}>
<div>{t('crontab.last_work_day_of_month')}</div>
</div>
</NRadio>
<NRadio class={styles['crontab-list']} value={'monthLastWeeks'}>
<div class={styles['crontab-list-item']}>
<div>{t('crontab.last_of_month')}</div>
<div>
<NSelect
style={{ width: '150px' }}
options={this.lastWeekOptions}
defaultValue={this.monthLastWeeksRef}
v-model:value={this.monthLastWeeksRef}
onUpdateValue={this.onMonthLastWeeks}
/>
</div>
</div>
</NRadio>
<NRadio class={styles['crontab-list']} value={'monthTailBefore'}>
<div class={styles['crontab-list-item']}>
<div>
<NInputNumber
defaultValue={0}
min={0}
max={31}
v-model:value={this.monthTailBeforeRef}
onUpdateValue={this.onMonthTailBefore}
/>
</div>
<div>{t('crontab.before_end_of_month')}</div>
</div>
</NRadio>
<NRadio
class={styles['crontab-list']}
value={'recentlyWorkingDaysMonth'}
>
<div class={styles['crontab-list-item']}>
<div>{t('crontab.recent_business_day_to_month')}</div>
<div>
<NInputNumber
defaultValue={0}
min={0}
max={31}
v-model:value={this.recentlyWorkingDaysMonthRef}
onUpdateValue={this.onRecentlyWorkingDaysMonth}
/>
</div>
<div>{t('crontab.one_day')}</div>
</div>
</NRadio>
<NRadio class={styles['crontab-list']} value={'WkmonthNumWeeks'}>
<div class={styles['crontab-list-item']}>
<div>{t('crontab.in_this_months')}</div>
<div>
<NInputNumber
defaultValue={0}
min={0}
max={31}
v-model:value={this.WkmonthNumWeeksDayRef}
onUpdateValue={this.onWkmonthNumWeeksDay}
/>
</div>
<div>
<NSelect
style={{ width: '150px' }}
options={this.weekOptions}
defaultValue={this.WkmonthNumWeeksWeekRef}
v-model:value={this.WkmonthNumWeeksWeekRef}
onUpdateValue={this.onWkmonthNumWeeksWeek}
/>
</div>
</div>
</NRadio>
</NRadioGroup>
)
}
})
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,726 | [Bug][UI Next][V1.0.0-Alpha] Workflow timing input box cannot be displayed normally. | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
![image](https://user-images.githubusercontent.com/8847400/156962299-033fa60a-7cf7-484e-9f05-ea75710b28c7.png)
### What you expected to happen
week select and business input normal display
### How to reproduce
Open timing for workflow
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8726 | https://github.com/apache/dolphinscheduler/pull/8902 | 8402367d826ae742dedc2c6cfbe51fbab57a1fcf | f4b3ed263cadc696ae080c68d1b734a096afabed | "2022-03-07T03:27:36Z" | java | "2022-03-15T09:11:12Z" | dolphinscheduler-ui-next/src/components/crontab/modules/time.tsx | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import _ from 'lodash'
import { defineComponent, onMounted, PropType, ref, toRefs, watch } from 'vue'
import { NInputNumber, NRadio, NRadioGroup, NSelect } from 'naive-ui'
import { useI18n } from 'vue-i18n'
import { ICrontabI18n } from '../types'
import { isStr } from '../common'
import styles from '../index.module.scss'
// v-model binding for the raw crontab time expression plus the i18n keys
// used by this time column (second/minute/hour share this component).
const props = {
  timeValue: {
    type: String as PropType<string>,
    default: '*'
  },
  timeI18n: {
    type: Object as PropType<ICrontabI18n>,
    // Fixed typo: the Vue prop option is `required`, not `require`
    // (the misspelled key was silently ignored, so the prop was optional).
    required: true
  }
}
// CrontabTime: picker for a single crontab time field (0-59 range, i.e.
// second/minute style). Renders four mutually exclusive modes in a radio
// group and keeps the crontab sub-expression in sync with the parent via
// the `update:timeValue` event:
//   everyTime    -> '*'
//   intervalTime -> '<start>/<step>'
//   specificTime -> comma-separated list, e.g. '1,5,30'
//   cycleTime    -> '<start>-<end>'
export default defineComponent({
  name: 'CrontabTime',
  props,
  emits: ['update:timeValue'],
  // NOTE: `props` here shadows the module-level props definition object;
  // inside setup it is the resolved reactive props instance.
  setup(props, ctx) {
    // Options 0..59 for the "specific time" multi-select.
    const options = Array.from({ length: 60 }, (x, i) => ({
      label: i.toString(),
      value: i
    }))
    // Current crontab sub-expression; the watcher below emits it upward.
    const timeRef = ref()
    // Active radio mode: 'everyTime' | 'intervalTime' | 'specificTime' | 'cycleTime'.
    const radioRef = ref()
    const intervalStartRef = ref(0) // interval mode: start value
    const intervalPerformRef = ref(0) // interval mode: step value
    const specificTimesRef = ref<Array<number>>([]) // specific mode: selected values
    const cycleStartRef = ref(0) // cycle mode: range start
    const cycleEndRef = ref(0) // cycle mode: range end
    /**
     * Parse the incoming `timeValue` prop once (on mount) and initialize the
     * radio mode plus the per-mode refs accordingly.
     * isStr presumably returns the split parts when the separator occurs in
     * the string, and a falsy value otherwise — defined in ../common; TODO confirm.
     */
    const analyticalValue = () => {
      const $timeVal = props.timeValue
      // Interval time ('<start>/<step>')
      const $interval = isStr($timeVal, '/')
      // Specific time (comma-separated list)
      const $specific = isStr($timeVal, ',')
      // Cycle time ('<start>-<end>')
      const $cycle = isStr($timeVal, '-')
      // Every time
      if ($timeVal === '*') {
        radioRef.value = 'everyTime'
        timeRef.value = '*'
        return
      }
      // Positive integer (times): a bare 1- or 2-digit value is treated as a
      // single "specific time" selection.
      if (
        ($timeVal.length === 1 && _.isInteger(parseInt($timeVal))) ||
        ($timeVal.length === 2 && _.isInteger(parseInt($timeVal)))
      ) {
        radioRef.value = 'specificTime'
        specificTimesRef.value = [parseInt($timeVal)]
        return
      }
      // Interval times
      if ($interval) {
        radioRef.value = 'intervalTime'
        intervalStartRef.value = parseInt($interval[0])
        intervalPerformRef.value = parseInt($interval[1])
        timeRef.value = `${intervalStartRef.value}/${intervalPerformRef.value}`
        return
      }
      // Specific times
      if ($specific) {
        radioRef.value = 'specificTime'
        specificTimesRef.value = $specific.map((item) => parseInt(item))
        return
      }
      // Cycle time
      if ($cycle) {
        radioRef.value = 'cycleTime'
        cycleStartRef.value = parseInt($cycle[0])
        cycleEndRef.value = parseInt($cycle[1])
        timeRef.value = `${cycleStartRef.value}-${cycleEndRef.value}`
        return
      }
    }
    // Interval start time(1) — only pushes into timeRef when its mode is active.
    const onIntervalStart = (value: number | null) => {
      intervalStartRef.value = value || 0
      if (radioRef.value === 'intervalTime') {
        timeRef.value = `${intervalStartRef.value}/${intervalPerformRef.value}`
      }
    }
    // Interval execution time(2)
    const onIntervalPerform = (value: number | null) => {
      intervalPerformRef.value = value || 0
      if (radioRef.value === 'intervalTime') {
        timeRef.value = `${intervalStartRef.value}/${intervalPerformRef.value}`
      }
    }
    // Specific time selection changed
    const onSpecificTimes = (arr: Array<number>) => {
      specificTimesRef.value = arr
      if (radioRef.value === 'specificTime') {
        specificReset()
      }
    }
    // Cycle start value
    const onCycleStart = (value: number | null) => {
      cycleStartRef.value = value || 0
      if (radioRef.value === 'cycleTime') {
        timeRef.value = `${cycleStartRef.value}-${cycleEndRef.value}`
      }
    }
    // Cycle end value
    const onCycleEnd = (value: number | null) => {
      cycleEndRef.value = value || 0
      if (radioRef.value === 'cycleTime') {
        timeRef.value = `${cycleStartRef.value}-${cycleEndRef.value}`
      }
    }
    // Reset every time: expression is always '*'
    const everyReset = () => {
      timeRef.value = '*'
    }
    // Reset interval time from the two interval refs
    const intervalReset = () => {
      timeRef.value = `${intervalStartRef.value}/${intervalPerformRef.value}`
    }
    // Reset specific time: empty selection falls back to '*'
    const specificReset = () => {
      let timeValue = '*'
      if (specificTimesRef.value.length) {
        timeValue = specificTimesRef.value.join(',')
      }
      timeRef.value = timeValue
    }
    // Reset cycle time from the two cycle refs
    const cycleReset = () => {
      timeRef.value = `${cycleStartRef.value}-${cycleEndRef.value}`
    }
    // Rebuild the expression whenever the user switches radio mode.
    const updateRadioTime = (value: string) => {
      switch (value) {
        case 'everyTime':
          everyReset()
          break
        case 'intervalTime':
          intervalReset()
          break
        case 'specificTime':
          specificReset()
          break
        case 'cycleTime':
          cycleReset()
          break
      }
    }
    // Propagate every expression change to the parent (v-model contract).
    watch(
      () => timeRef.value,
      () => ctx.emit('update:timeValue', timeRef.value.toString())
    )
    onMounted(() => analyticalValue())
    // `...toRefs(props)` exposes timeValue/timeI18n on `this` for render().
    return {
      options,
      radioRef,
      intervalStartRef,
      intervalPerformRef,
      specificTimesRef,
      cycleStartRef,
      cycleEndRef,
      updateRadioTime,
      onIntervalStart,
      onIntervalPerform,
      onSpecificTimes,
      onCycleStart,
      onCycleEnd,
      ...toRefs(props)
    }
  },
  render() {
    const { t } = useI18n()
    // Four radio rows; the number inputs / multi-select feed the handlers
    // returned from setup, which in turn rebuild the crontab expression.
    return (
      <NRadioGroup
        v-model:value={this.radioRef}
        onUpdateValue={this.updateRadioTime}
      >
        <NRadio class={styles['crontab-list']} value={'everyTime'}>
          <div class={styles['crontab-list-item']}>
            <div>{t(this.timeI18n!.everyTime)}</div>
          </div>
        </NRadio>
        <NRadio class={styles['crontab-list']} value={'intervalTime'}>
          <div class={styles['crontab-list-item']}>
            <div>{t(this.timeI18n!.every)}</div>
            <div>
              <NInputNumber
                defaultValue={0}
                min={0}
                max={59}
                v-model:value={this.intervalStartRef}
                onUpdateValue={this.onIntervalStart}
              />
            </div>
            <div>{t(this.timeI18n!.timeCarriedOut)}</div>
            <div>
              <NInputNumber
                defaultValue={0}
                min={0}
                max={59}
                v-model:value={this.intervalPerformRef}
                onUpdateValue={this.onIntervalPerform}
              />
            </div>
            <div>{t(this.timeI18n!.timeStart)}</div>
          </div>
        </NRadio>
        <NRadio class={styles['crontab-list']} value={'specificTime'}>
          <div class={styles['crontab-list-item']}>
            <div>{t(this.timeI18n!.specificTime)}</div>
            <div>
              <NSelect
                style={{ width: '300px' }}
                multiple
                options={this.options}
                placeholder={t(this.timeI18n!.specificTimeTip)}
                v-model:value={this.specificTimesRef}
                onUpdateValue={this.onSpecificTimes}
              />
            </div>
          </div>
        </NRadio>
        <NRadio class={styles['crontab-list']} value={'cycleTime'}>
          <div class={styles['crontab-list-item']}>
            <div>{t(this.timeI18n!.cycleFrom)}</div>
            <div>
              <NInputNumber
                defaultValue={0}
                min={0}
                max={59}
                v-model:value={this.cycleStartRef}
                onUpdateValue={this.onCycleStart}
              />
            </div>
            <div>{t(this.timeI18n!.to)}</div>
            <div>
              <NInputNumber
                defaultValue={0}
                min={0}
                max={59}
                v-model:value={this.cycleEndRef}
                onUpdateValue={this.onCycleEnd}
              />
            </div>
            <div>{t(this.timeI18n!.time)}</div>
          </div>
        </NRadio>
      </NRadioGroup>
    )
  }
})
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,806 | [Bug-FE][UI Next][V1.0.0-Alpha] Repeat workflow import, the last import file was not cleared | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
When importing a workflow repeatedly, the file selected during the previous import is not cleared from the dialog
<img width="1914" alt="image" src="https://user-images.githubusercontent.com/76080484/157603260-5f082bec-19ad-4745-be65-19a42e4d1552.png">
### What you expected to happen
The previously selected file should be cleared when the import-workflow dialog is opened again
### How to reproduce
1. Click Import Workflow
2. Select the workflow to be imported
3. Click Import
4. Click Import workflow again
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8806 | https://github.com/apache/dolphinscheduler/pull/8905 | 242b5dd5b61a86c8a2baeb291d4def4d9fb07b26 | 58e341c19cde597a7e8deb84c6e5f2a53c857e78 | "2022-03-10T06:40:12Z" | java | "2022-03-15T09:43:49Z" | dolphinscheduler-ui-next/src/views/projects/workflow/definition/components/use-modal.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import _ from 'lodash'
import { reactive, SetupContext } from 'vue'
import { useI18n } from 'vue-i18n'
import { useRoute, useRouter } from 'vue-router'
import type { Router } from 'vue-router'
import { format } from 'date-fns'
import {
importProcessDefinition,
queryProcessDefinitionByCode
} from '@/service/modules/process-definition'
import { queryAllWorkerGroups } from '@/service/modules/worker-groups'
import { queryAllEnvironmentList } from '@/service/modules/environment'
import { listAlertGroupById } from '@/service/modules/alert-group'
import { startProcessInstance } from '@/service/modules/executors'
import {
createSchedule,
updateSchedule,
previewSchedule
} from '@/service/modules/schedules'
import { parseTime } from '@/utils/common'
/**
 * Shared state and handlers for the workflow-definition modals
 * (import, start, timing create/update, schedule preview).
 *
 * @param state reactive state owned by the caller; expected to expose the
 *        form models (importForm, startForm, timingForm), their naive-ui form
 *        refs (importFormRef, startFormRef, timingFormRef) and a `saving` flag
 * @param ctx emitter used to close the modal and refresh the definition list
 */
export function useModal(
  state: any,
  ctx: SetupContext<('update:show' | 'update:row' | 'updateList')[]>
) {
  const { t } = useI18n()
  const router: Router = useRouter()
  const route = useRoute()

  const variables = reactive({
    projectCode: Number(route.params.projectCode),
    workerGroups: [],
    alertGroups: [],
    environmentList: [],
    startParamsList: [] as Array<{ prop: string; value: string }>,
    schedulePreviewList: []
  })

  // Clear the import form MODEL so a reopened dialog does not keep the
  // previously chosen file.
  // Fix (issue #8806): the old code reset `state.importFormRef` — the
  // naive-ui form ref used for validation — which left `importForm.file`
  // populated across dialog openings.
  const resetImportForm = () => {
    state.importForm.name = ''
    state.importForm.file = ''
  }

  // Upload the selected definition file, then close the modal, refresh the
  // list and clear the form for the next import.
  const handleImportDefinition = async () => {
    await state.importFormRef.validate()

    if (state.saving) return
    state.saving = true
    try {
      const formData = new FormData()
      formData.append('file', state.importForm.file)
      const code = Number(router.currentRoute.value.params.projectCode)
      await importProcessDefinition(formData, code)
      window.$message.success(t('project.workflow.success'))
      state.saving = false
      ctx.emit('updateList')
      ctx.emit('update:show')
      resetImportForm()
    } catch (err) {
      state.saving = false
    }
  }

  // Start a process instance for definition `code`, serializing the optional
  // complement date range and non-empty start params.
  const handleStartDefinition = async (code: number) => {
    await state.startFormRef.validate()

    if (state.saving) return
    state.saving = true
    try {
      state.startForm.processDefinitionCode = code
      if (state.startForm.startEndTime) {
        const start = format(
          new Date(state.startForm.startEndTime[0]),
          'yyyy-MM-dd hh:mm:ss'
        )
        const end = format(
          new Date(state.startForm.startEndTime[1]),
          'yyyy-MM-dd hh:mm:ss'
        )
        state.startForm.scheduleTime = `${start},${end}`
      }

      // Only forward params the user actually filled in.
      const startParams = {} as any
      for (const item of variables.startParamsList) {
        if (item.value !== '') {
          startParams[item.prop] = item.value
        }
      }
      state.startForm.startParams = !_.isEmpty(startParams)
        ? JSON.stringify(startParams)
        : ''

      await startProcessInstance(state.startForm, variables.projectCode)
      window.$message.success(t('project.workflow.success'))
      state.saving = false
      ctx.emit('updateList')
      ctx.emit('update:show')
    } catch (err) {
      state.saving = false
    }
  }

  // Create a schedule for definition `code` from the timing form.
  const handleCreateTiming = async (code: number) => {
    await state.timingFormRef.validate()

    if (state.saving) return
    state.saving = true
    try {
      const data: any = getTimingData()
      data.processDefinitionCode = code

      await createSchedule(data, variables.projectCode)
      window.$message.success(t('project.workflow.success'))
      state.saving = false
      ctx.emit('updateList')
      ctx.emit('update:show')
    } catch (err) {
      state.saving = false
    }
  }

  // Update schedule `id` from the timing form.
  const handleUpdateTiming = async (id: number) => {
    await state.timingFormRef.validate()

    if (state.saving) return
    state.saving = true
    try {
      const data: any = getTimingData()
      data.id = id

      await updateSchedule(data, variables.projectCode, id)
      window.$message.success(t('project.workflow.success'))
      state.saving = false
      ctx.emit('updateList')
      ctx.emit('update:show')
    } catch (err) {
      state.saving = false
    }
  }

  // Serialize the timing form into the payload shape shared by create/update.
  const getTimingData = () => {
    const start = format(
      parseTime(state.timingForm.startEndTime[0]),
      'yyyy-MM-dd hh:mm:ss'
    )
    const end = format(
      parseTime(state.timingForm.startEndTime[1]),
      'yyyy-MM-dd hh:mm:ss'
    )

    const data = {
      schedule: JSON.stringify({
        startTime: start,
        endTime: end,
        crontab: state.timingForm.crontab,
        timezoneId: state.timingForm.timezoneId
      }),
      failureStrategy: state.timingForm.failureStrategy,
      warningType: state.timingForm.warningType,
      processInstancePriority: state.timingForm.processInstancePriority,
      warningGroupId:
        state.timingForm.warningGroupId === ''
          ? 0
          : state.timingForm.warningGroupId,
      workerGroup: state.timingForm.workerGroup,
      environmentCode: state.timingForm.environmentCode
    }
    return data
  }

  // Load worker-group names into select options.
  const getWorkerGroups = () => {
    queryAllWorkerGroups().then((res: any) => {
      variables.workerGroups = res.map((item: string) => ({
        label: item,
        value: item
      }))
    })
  }

  // Load environments into select options (keeps workerGroups for filtering).
  const getEnvironmentList = () => {
    queryAllEnvironmentList().then((res: any) => {
      variables.environmentList = res.map((item: any) => ({
        label: item.name,
        value: item.code,
        workerGroups: item.workerGroups
      }))
    })
  }

  // Load alert groups into select options.
  const getAlertGroups = () => {
    listAlertGroupById().then((res: any) => {
      variables.alertGroups = res.map((item: any) => ({
        label: item.groupName,
        value: item.id
      }))
    })
  }

  // Load the definition's global params to prefill the start dialog.
  const getStartParamsList = (code: number) => {
    queryProcessDefinitionByCode(code, variables.projectCode).then(
      (res: any) => {
        variables.startParamsList = res.processDefinition.globalParamList
      }
    )
  }

  // Preview the next run times for the configured crontab/date range.
  const getPreviewSchedule = () => {
    state.timingFormRef.validate(async (valid: any) => {
      // NOTE: naive-ui passes the validation errors to this callback, so a
      // falsy `valid` means validation passed.
      if (!valid) {
        const projectCode = Number(router.currentRoute.value.params.projectCode)

        const start = format(
          new Date(state.timingForm.startEndTime[0]),
          'yyyy-MM-dd hh:mm:ss'
        )
        const end = format(
          new Date(state.timingForm.startEndTime[1]),
          'yyyy-MM-dd hh:mm:ss'
        )

        const schedule = JSON.stringify({
          startTime: start,
          endTime: end,
          crontab: state.timingForm.crontab
        })

        previewSchedule({ schedule }, projectCode).then((res: any) => {
          variables.schedulePreviewList = res
        })
      }
    })
  }

  return {
    variables,
    handleImportDefinition,
    handleStartDefinition,
    handleCreateTiming,
    handleUpdateTiming,
    getWorkerGroups,
    getAlertGroups,
    getEnvironmentList,
    getStartParamsList,
    getPreviewSchedule
  }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,835 | [Bug(Serious)-FE][UI Next][V1.0.0-Alpha]Failed to save a workflow containing a child workflow node | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
Failed to save a workflow containing a child workflow node
<img width="1900" alt="image" src="https://user-images.githubusercontent.com/76080484/157829805-2aa693cf-6096-4477-af83-b437477bc27a.png">
### What you expected to happen
Successfully saved and ready to run
### How to reproduce
1. First, create and save a new workflow.
2. Create a second workflow and begin editing it.
3. Drag a sub-workflow component onto the canvas.
4. Set the sub-workflow node to reference the first completed workflow.
5. Click "Save Workflow"; an error is reported.
The front end does not pass information about the child node to the back end
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8835 | https://github.com/apache/dolphinscheduler/pull/8908 | f726730d93d52e646d6b27fbddf23fae643f868c | 9a93910ac5af8512e6f02f664995897e8e0459a5 | "2022-03-11T08:23:35Z" | java | "2022-03-15T11:01:48Z" | dolphinscheduler-ui-next/src/views/projects/task/components/node/fields/use-child-node.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { ref, onMounted } from 'vue'
import { useI18n } from 'vue-i18n'
import { uniqBy } from 'lodash'
import {
querySimpleList,
queryProcessDefinitionByCode
} from '@/service/modules/process-definition'
import type { IJsonItem } from '../types'
export function useChildNode({
model,
projectCode,
from,
processName,
code
}: {
model: { [field: string]: any }
projectCode: number
from?: number
processName?: number
code?: number
}): IJsonItem {
const { t } = useI18n()
const options = ref([] as { label: string; value: string }[])
const loading = ref(false)
const getProcessList = async () => {
if (loading.value) return
loading.value = true
const res = await querySimpleList(projectCode)
options.value = res.map((option: { name: string; code: number }) => ({
label: option.name,
value: option.code
}))
loading.value = false
}
const getProcessListByCode = async (processCode: number) => {
if (!processCode) return
const res = await queryProcessDefinitionByCode(processCode, projectCode)
getTaskOptions(res)
}
const getTaskOptions = (processDefinition: {
processTaskRelationList: []
taskDefinitionList: []
}) => {
const { processTaskRelationList = [], taskDefinitionList = [] } =
processDefinition
const preTaskOptions: { code: number; name: string }[] = []
const tasks: { [field: number]: string } = {}
taskDefinitionList.forEach(
(task: { code: number; taskType: string; name: string }) => {
tasks[task.code] = task.name
if (task.code === code) return
if (
task.taskType === 'CONDITIONS' &&
processTaskRelationList.filter(
(relation: { preTaskCode: number }) =>
relation.preTaskCode === task.code
).length >= 2
) {
return
}
preTaskOptions.push({
code: task.code,
name: task.name
})
}
)
model.preTaskOptions = uniqBy(preTaskOptions, 'code')
if (!code) return
const preTasks: number[] = []
const postTaskOptions: { code: number; name: string }[] = []
processTaskRelationList.forEach(
(relation: { preTaskCode: number; postTaskCode: number }) => {
if (relation.preTaskCode === code) {
postTaskOptions.push({
code: relation.postTaskCode,
name: tasks[relation.postTaskCode]
})
}
if (relation.postTaskCode === code && relation.preTaskCode !== 0) {
preTasks.push(relation.preTaskCode)
}
}
)
model.preTasks = preTasks
model.postTaskOptions = postTaskOptions
}
const onChange = (code: number) => {
getProcessListByCode(code)
}
onMounted(() => {
if (from === 1 && processName) {
getProcessListByCode(processName)
}
getProcessList()
})
return {
type: 'select',
field: 'processDefinitionCode',
span: 24,
name: t('project.node.child_node'),
props: {
loading: loading,
'on-update:value': onChange
},
options: options
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,835 | [Bug(Serious)-FE][UI Next][V1.0.0-Alpha]Failed to save a workflow containing a child workflow node | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
Failed to save a workflow containing a child workflow node
<img width="1900" alt="image" src="https://user-images.githubusercontent.com/76080484/157829805-2aa693cf-6096-4477-af83-b437477bc27a.png">
### What you expected to happen
Successfully saved and ready to run
### How to reproduce
1. First, create and save a new workflow.
2. Create a second workflow and begin editing it.
3. Drag a sub-workflow component onto the canvas.
4. Set the sub-workflow node to reference the first completed workflow.
5. Click "Save Workflow"; an error is reported.
The front end does not pass information about the child node to the back end
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8835 | https://github.com/apache/dolphinscheduler/pull/8908 | f726730d93d52e646d6b27fbddf23fae643f868c | 9a93910ac5af8512e6f02f664995897e8e0459a5 | "2022-03-11T08:23:35Z" | java | "2022-03-15T11:01:48Z" | dolphinscheduler-ui-next/src/views/projects/task/components/node/format-data.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { find, omit, cloneDeep } from 'lodash'
import type {
INodeData,
ITaskData,
ITaskParams,
ISqoopTargetParams,
ISqoopSourceParams,
ILocalParam,
IDependTask
} from './types'
/**
 * Convert the node form model into the request payload expected by the
 * task-definition API. Per-task-type fields are collected into `taskParams`
 * and merged into `taskDefinitionJsonObj`.
 *
 * Fix (issue #8835): a SUB_PROCESS branch is added so the selected child
 * workflow's code is forwarded in `taskParams.processDefinitionCode`;
 * previously the backend received no child-node information and saving
 * failed.
 */
export function formatParams(data: INodeData): {
  processDefinitionCode: string
  upstreamCodes: string
  taskDefinitionJsonObj: object
} {
  const taskParams: ITaskParams = {}

  // Fields shared by the three JVM-based engines.
  if (
    data.taskType === 'SPARK' ||
    data.taskType === 'MR' ||
    data.taskType === 'FLINK'
  ) {
    taskParams.programType = data.programType
    taskParams.mainClass = data.mainClass
    if (data.mainJar) {
      taskParams.mainJar = { id: data.mainJar }
    }
    taskParams.deployMode = data.deployMode
    taskParams.appName = data.appName
    taskParams.mainArgs = data.mainArgs
    taskParams.others = data.others
  }

  if (data.taskType === 'SPARK') {
    taskParams.sparkVersion = data.sparkVersion
    taskParams.driverCores = data.driverCores
    taskParams.driverMemory = data.driverMemory
    taskParams.numExecutors = data.numExecutors
    taskParams.executorMemory = data.executorMemory
    taskParams.executorCores = data.executorCores
  }

  if (data.taskType === 'FLINK') {
    taskParams.flinkVersion = data.flinkVersion
    taskParams.jobManagerMemory = data.jobManagerMemory
    taskParams.taskManagerMemory = data.taskManagerMemory
    taskParams.slot = data.slot
    taskParams.taskManager = data.taskManager
    taskParams.parallelism = data.parallelism
  }

  if (data.taskType === 'HTTP') {
    taskParams.httpMethod = data.httpMethod
    taskParams.httpCheckCondition = data.httpCheckCondition
    taskParams.httpParams = data.httpParams
    taskParams.url = data.url
    taskParams.condition = data.condition
    taskParams.connectTimeout = data.connectTimeout
    taskParams.socketTimeout = data.socketTimeout
  }

  if (data.taskType === 'SQOOP') {
    taskParams.jobType = data.isCustomTask ? 'CUSTOM' : 'TEMPLATE'
    taskParams.localParams = data.localParams
    if (data.isCustomTask) {
      taskParams.customShell = data.customShell
    } else {
      taskParams.jobName = data.jobName
      taskParams.hadoopCustomParams = data.hadoopCustomParams
      taskParams.sqoopAdvancedParams = data.sqoopAdvancedParams
      taskParams.concurrency = data.concurrency
      taskParams.modelType = data.modelType
      taskParams.sourceType = data.sourceType
      taskParams.targetType = data.targetType
      // Source/target params are serialized to JSON strings for the backend.
      let targetParams: ISqoopTargetParams = {}
      let sourceParams: ISqoopSourceParams = {}
      switch (data.targetType) {
        case 'HIVE':
          targetParams = {
            hiveDatabase: data.targetHiveDatabase,
            hiveTable: data.targetHiveTable,
            createHiveTable: data.targetHiveCreateTable,
            dropDelimiter: data.targetHiveDropDelimiter,
            hiveOverWrite: data.targetHiveOverWrite,
            hiveTargetDir: data.targetHiveTargetDir,
            replaceDelimiter: data.targetHiveReplaceDelimiter,
            hivePartitionKey: data.targetHivePartitionKey,
            hivePartitionValue: data.targetHivePartitionValue
          }
          break
        case 'HDFS':
          targetParams = {
            targetPath: data.targetHdfsTargetPath,
            deleteTargetDir: data.targetHdfsDeleteTargetDir,
            compressionCodec: data.targetHdfsCompressionCodec,
            fileType: data.targetHdfsFileType,
            fieldsTerminated: data.targetHdfsFieldsTerminated,
            linesTerminated: data.targetHdfsLinesTerminated
          }
          break
        case 'MYSQL':
          targetParams = {
            targetType: data.targetMysqlType,
            targetDatasource: data.targetMysqlDatasource,
            targetTable: data.targetMysqlTable,
            targetColumns: data.targetMysqlColumns,
            fieldsTerminated: data.targetMysqlFieldsTerminated,
            linesTerminated: data.targetMysqlLinesTerminated,
            isUpdate: data.targetMysqlIsUpdate,
            targetUpdateKey: data.targetMysqlTargetUpdateKey,
            targetUpdateMode: data.targetMysqlUpdateMode
          }
          break
        default:
          break
      }
      switch (data.sourceType) {
        case 'MYSQL':
          // srcQueryType '1' means "custom SQL": table/column fields are blanked.
          sourceParams = {
            srcTable: data.srcQueryType === '1' ? '' : data.srcTable,
            srcColumnType: data.srcQueryType === '1' ? '0' : data.srcColumnType,
            srcColumns:
              data.srcQueryType === '1' || data.srcColumnType === '0'
                ? ''
                : data.srcColumns,
            srcQuerySql:
              data.srcQueryType === '0' ? '' : data.sourceMysqlSrcQuerySql,
            srcQueryType: data.srcQueryType,
            srcType: data.sourceMysqlType,
            srcDatasource: data.sourceMysqlDatasource,
            mapColumnHive: data.mapColumnHive,
            mapColumnJava: data.mapColumnJava
          }
          break
        case 'HDFS':
          sourceParams = {
            exportDir: data.sourceHdfsExportDir
          }
          break
        case 'HIVE':
          sourceParams = {
            hiveDatabase: data.sourceHiveDatabase,
            hiveTable: data.sourceHiveTable,
            hivePartitionKey: data.sourceHivePartitionKey,
            hivePartitionValue: data.sourceHivePartitionValue
          }
          break
        default:
          break
      }
      taskParams.targetParams = JSON.stringify(targetParams)
      taskParams.sourceParams = JSON.stringify(sourceParams)
    }
  }

  if (data.taskType === 'SQL') {
    taskParams.type = data.type
    taskParams.datasource = data.datasource
    taskParams.sql = data.sql
    taskParams.sqlType = data.sqlType
    taskParams.preStatements = data.preStatements
    taskParams.postStatements = data.postStatements
  }

  if (data.taskType === 'PROCEDURE') {
    taskParams.type = data.type
    taskParams.datasource = data.datasource
    taskParams.method = data.method
  }

  if (data.taskType === 'SEATUNNEL') {
    // 'local' is expressed as master=local with client deploy mode.
    if (data.deployMode === 'local') {
      data.master = 'local'
      data.masterUrl = ''
      data.deployMode = 'client'
    }
    buildRawScript(data)
  }

  if (data.taskType === 'SWITCH') {
    taskParams.switchResult = {}
    taskParams.switchResult.dependTaskList = data.dependTaskList
    taskParams.switchResult.nextNode = data.nextNode
  }

  if (data.taskType === 'CONDITIONS') {
    taskParams.dependence = {
      relation: data.relation,
      dependTaskList: data.dependTaskList
    }
  }

  if (data.taskType === 'DATAX') {
    taskParams.customConfig = data.customConfig
    if (taskParams.customConfig === 0) {
      taskParams.dsType = data.dsType
      taskParams.dataSource = data.dataSource
      taskParams.dtType = data.dtType
      taskParams.dataTarget = data.dataTarget
      taskParams.sql = data.sql
      taskParams.targetTable = data.targetTable
      taskParams.jobSpeedByte = data.jobSpeedByte
      taskParams.jobSpeedRecord = data.jobSpeedRecord
      taskParams.preStatements = data.preStatements
      taskParams.postStatements = data.postStatements
    } else {
      taskParams.json = data.json
      data?.localParams?.map((param: ILocalParam) => {
        param.direct = 'IN'
        param.type = 'VARCHAR'
      })
    }
    taskParams.xms = data.xms
    taskParams.xmx = data.xmx
  }

  if (data.taskType === 'DEPENDENT') {
    // Strip the UI-only option caches before sending to the backend.
    const dependTaskList = cloneDeep(data.dependTaskList)?.map(
      (taskItem: IDependTask) => {
        if (taskItem.dependItemList?.length) {
          taskItem.dependItemList.forEach((dependItem) => {
            delete dependItem.definitionCodeOptions
            delete dependItem.depTaskCodeOptions
            delete dependItem.dateOptions
          })
        }
        return taskItem
      }
    )
    taskParams.dependence = {
      relation: data.relation,
      dependTaskList: dependTaskList
    }
  }

  if (data.taskType === 'DATA_QUALITY') {
    taskParams.ruleId = data.ruleId
    taskParams.ruleInputParameter = {
      check_type: data.check_type,
      comparison_execute_sql: data.comparison_execute_sql,
      comparison_name: data.comparison_name,
      failure_strategy: data.failure_strategy,
      operator: data.operator,
      src_connector_type: data.src_connector_type,
      src_datasource_id: data.src_datasource_id,
      src_table: data.src_table,
      statistics_execute_sql: data.statistics_execute_sql,
      statistics_name: data.statistics_name,
      target_connector_type: data.target_connector_type,
      target_datasource_id: data.target_datasource_id,
      target_table: data.target_table,
      threshold: data.threshold
    }
    taskParams.sparkParameters = {
      deployMode: data.deployMode,
      driverCores: data.driverCores,
      driverMemory: data.driverMemory,
      executorCores: data.executorCores,
      executorMemory: data.executorMemory,
      numExecutors: data.numExecutors,
      others: data.others
    }
  }

  if (data.taskType === 'EMR') {
    taskParams.type = data.type
    taskParams.jobFlowDefineJson = data.jobFlowDefineJson
  }

  if (data.taskType === 'SUB_PROCESS') {
    // Forward the selected child workflow code; without it the backend
    // receives an empty sub-process definition and saving fails.
    taskParams.processDefinitionCode = data.processDefinitionCode
  }

  const params = {
    processDefinitionCode: data.processName ? String(data.processName) : '',
    upstreamCodes: data?.preTasks?.join(','),
    taskDefinitionJsonObj: {
      code: data.code,
      delayTime: data.delayTime ? String(data.delayTime) : '0',
      description: data.description,
      environmentCode: data.environmentCode || -1,
      failRetryInterval: data.failRetryInterval
        ? String(data.failRetryInterval)
        : '0',
      failRetryTimes: data.failRetryTimes ? String(data.failRetryTimes) : '0',
      flag: data.flag,
      name: data.name,
      taskGroupId: data.taskGroupId || 0,
      taskGroupPriority: data.taskGroupPriority,
      taskParams: {
        localParams: data.localParams,
        rawScript: data.rawScript,
        resourceList: data.resourceList?.length
          ? data.resourceList.map((id: number) => ({ id }))
          : [],
        ...taskParams
      },
      taskPriority: data.taskPriority,
      taskType: data.taskType,
      timeout: data.timeout,
      timeoutFlag: data.timeoutFlag ? 'OPEN' : 'CLOSE',
      timeoutNotifyStrategy: data.timeoutNotifyStrategy?.join(''),
      workerGroup: data.workerGroup
    }
  } as {
    processDefinitionCode: string
    upstreamCodes: string
    taskDefinitionJsonObj: { timeout: number; timeoutNotifyStrategy: string }
  }
  // With the timeout switch off, the backend expects zeroed timeout fields.
  if (!data.timeoutFlag) {
    params.taskDefinitionJsonObj.timeout = 0
    params.taskDefinitionJsonObj.timeoutNotifyStrategy = ''
  }

  return params
}
/**
 * Convert a task definition fetched from the backend into the node form
 * model (inverse of formatParams): flattens `taskParams`, de-serializes the
 * sqoop source/target params and normalizes timeout/strategy fields.
 *
 * Fix: `[data.timeoutNotifyStrategy] || []` always took the left side
 * (an array literal is truthy), producing `[undefined]` when no strategy
 * was set; it now yields `[]` in that case.
 */
export function formatModel(data: ITaskData) {
  const params = {
    ...omit(data, [
      'environmentCode',
      'timeoutFlag',
      'timeoutNotifyStrategy',
      'taskParams'
    ]),
    ...omit(data.taskParams, ['resourceList', 'mainJar', 'localParams']),
    environmentCode: data.environmentCode === -1 ? null : data.environmentCode,
    timeoutFlag: data.timeoutFlag === 'OPEN',
    timeoutNotifyStrategy: data.timeoutNotifyStrategy
      ? [data.timeoutNotifyStrategy]
      : [],
    localParams: data.taskParams?.localParams || []
  } as INodeData

  // The combined backend value maps to both checkboxes in the form.
  if (data.timeoutNotifyStrategy === 'WARNFAILED') {
    params.timeoutNotifyStrategy = ['WARN', 'FAILED']
  }
  if (data.taskParams?.resourceList) {
    params.resourceList = data.taskParams.resourceList.map(
      (item: { id: number }) => item.id
    )
  }
  // Non-default HTTP timeouts open the "timeout settings" section.
  if (
    data.taskParams?.connectTimeout !== 60000 ||
    data.taskParams?.socketTimeout !== 60000
  ) {
    params.timeoutSetting = true
  }
  if (data.taskParams?.mainJar) {
    params.mainJar = data.taskParams?.mainJar.id
  }

  if (data.taskParams?.method) {
    params.method = data.taskParams?.method
  }

  // Sqoop target params arrive as a JSON string; expand into flat form fields.
  if (data.taskParams?.targetParams) {
    const targetParams: ISqoopTargetParams = JSON.parse(
      data.taskParams.targetParams
    )
    params.targetHiveDatabase = targetParams.hiveDatabase
    params.targetHiveTable = targetParams.hiveTable
    params.targetHiveCreateTable = targetParams.createHiveTable
    params.targetHiveDropDelimiter = targetParams.dropDelimiter
    params.targetHiveOverWrite = targetParams.hiveOverWrite
    params.targetHiveTargetDir = targetParams.hiveTargetDir
    params.targetHiveReplaceDelimiter = targetParams.replaceDelimiter
    params.targetHivePartitionKey = targetParams.hivePartitionKey
    params.targetHivePartitionValue = targetParams.hivePartitionValue
    params.targetHdfsTargetPath = targetParams.targetPath
    params.targetHdfsDeleteTargetDir = targetParams.deleteTargetDir
    params.targetHdfsCompressionCodec = targetParams.compressionCodec
    params.targetHdfsFileType = targetParams.fileType
    params.targetHdfsFieldsTerminated = targetParams.fieldsTerminated
    params.targetHdfsLinesTerminated = targetParams.linesTerminated
    params.targetMysqlType = targetParams.targetType
    params.targetMysqlDatasource = targetParams.targetDatasource
    params.targetMysqlTable = targetParams.targetTable
    params.targetMysqlColumns = targetParams.targetColumns
    params.targetMysqlFieldsTerminated = targetParams.fieldsTerminated
    params.targetMysqlLinesTerminated = targetParams.linesTerminated
    params.targetMysqlIsUpdate = targetParams.isUpdate
    params.targetMysqlTargetUpdateKey = targetParams.targetUpdateKey
    params.targetMysqlUpdateMode = targetParams.targetUpdateMode
  }
  // Sqoop source params arrive as a JSON string; expand into flat form fields.
  if (data.taskParams?.sourceParams) {
    const sourceParams: ISqoopSourceParams = JSON.parse(
      data.taskParams.sourceParams
    )
    params.srcTable = sourceParams.srcTable
    params.srcColumnType = sourceParams.srcColumnType
    params.srcColumns = sourceParams.srcColumns
    params.sourceMysqlSrcQuerySql = sourceParams.srcQuerySql
    params.srcQueryType = sourceParams.srcQueryType
    params.sourceMysqlType = sourceParams.srcType
    params.sourceMysqlDatasource = sourceParams.srcDatasource
    params.mapColumnHive = sourceParams.mapColumnHive
    params.mapColumnJava = sourceParams.mapColumnJava
    params.sourceHdfsExportDir = sourceParams.exportDir
    params.sourceHiveDatabase = sourceParams.hiveDatabase
    params.sourceHiveTable = sourceParams.hiveTable
    params.sourceHivePartitionKey = sourceParams.hivePartitionKey
    params.sourceHivePartitionValue = sourceParams.hivePartitionValue
  }

  if (data.taskParams?.rawScript) {
    params.rawScript = data.taskParams?.rawScript
  }

  if (data.taskParams?.switchResult) {
    params.switchResult = data.taskParams.switchResult
    params.dependTaskList = data.taskParams.switchResult?.dependTaskList
      ? data.taskParams.switchResult?.dependTaskList
      : []
    params.nextNode = data.taskParams.switchResult?.nextNode
  }

  if (data.taskParams?.dependence) {
    params.dependTaskList = data.taskParams?.dependence.dependTaskList || []
    params.relation = data.taskParams?.dependence.relation
  }

  if (data.taskParams?.ruleInputParameter) {
    params.check_type = data.check_type
    params.comparison_execute_sql = data.comparison_execute_sql
    params.comparison_name = data.comparison_name
    params.failure_strategy = data.failure_strategy
    params.operator = data.operator
    params.src_connector_type = data.src_connector_type
    params.src_datasource_id = data.src_datasource_id
    params.src_table = data.src_table
    params.statistics_execute_sql = data.statistics_execute_sql
    params.statistics_name = data.statistics_name
    params.target_connector_type = data.target_connector_type
    params.target_datasource_id = data.target_datasource_id
    params.target_table = data.target_table
    params.threshold = data.threshold
  }
  if (data.taskParams?.sparkParameters) {
    params.deployMode = data.deployMode
    params.driverCores = data.driverCores
    params.driverMemory = data.driverMemory
    params.executorCores = data.executorCores
    params.executorMemory = data.executorMemory
    params.numExecutors = data.numExecutors
    params.others = data.others
  }

  if (data.taskParams?.jobFlowDefineJson) {
    params.jobFlowDefineJson = data.taskParams.jobFlowDefineJson
  }

  return params
}
const buildRawScript = (model: INodeData) => {
  // Launcher invocation for the Waterdrop (SeaTunnel) start script; the
  // ${WATERDROP_HOME} placeholder is expanded by the shell at run time.
  const startCommand = 'sh ${WATERDROP_HOME}/bin/start-waterdrop.sh'
  if (!model.resourceList) return

  let master = model.master
  let masterUrl = model?.masterUrl ? model?.masterUrl : ''
  let deployMode = model.deployMode
  const queue = model.queue

  // A local run is forced into client deploy mode and carries no master URL.
  if (model.deployMode === 'local') {
    master = 'local'
    masterUrl = ''
    deployMode = 'client'
  }
  // 'yarn' and 'local' masters never take an explicit URL either.
  if (master === 'yarn' || master === 'local') {
    masterUrl = ''
  }

  // Render each local parameter as a repeated `--variable k=v` flag.
  let variableFlags = ''
  for (const param of model?.localParams ?? []) {
    variableFlags = `${variableFlags} --variable ${(param as any).prop}=${
      (param as any).value
    }`
  }

  // Emit one launch line per selected resource (config) file.
  let script = ''
  for (const resourceId of model.resourceList ?? []) {
    const resource = find(model.resourceFiles, { id: resourceId })
    script = `${script}${startCommand} --master ${master}${masterUrl} --deploy-mode ${deployMode} --queue ${queue}`
    if (resource && resource.fullName) {
      script = `${script} --config ${resource.fullName}`
    }
    script = `${script}${variableFlags} \n`
  }
  model.rawScript = script ? script : ''
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,835 | [Bug(Serious)-FE][UI Next][V1.0.0-Alpha]Failed to save a workflow containing a child workflow node | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
Failed to save a workflow containing a child workflow node
<img width="1900" alt="image" src="https://user-images.githubusercontent.com/76080484/157829805-2aa693cf-6096-4477-af83-b437477bc27a.png">
### What you expected to happen
Successfully saved and ready to run
### How to reproduce
1、First complete a new workflow
2、Create a new workflow that is being edited
3、Drag a sub-workflow component
4、Set the sub-workflow as the first new workflow completed
5、Click Save Workflow to report an error
The front end does not pass information about the child node to the back end
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8835 | https://github.com/apache/dolphinscheduler/pull/8908 | f726730d93d52e646d6b27fbddf23fae643f868c | 9a93910ac5af8512e6f02f664995897e8e0459a5 | "2022-03-11T08:23:35Z" | java | "2022-03-15T11:01:48Z" | dolphinscheduler-ui-next/src/views/projects/task/components/node/tasks/use-sub-process.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { reactive } from 'vue'
import * as Fields from '../fields/index'
import type { IJsonItem, INodeData, ITaskData } from '../types'
export function useSubProcess({
  projectCode,
  from = 0,
  readonly,
  data
}: {
  projectCode: number
  from?: number
  readonly?: boolean
  data?: ITaskData
}) {
  // Reactive form model backing the SUB_PROCESS node editor.
  const model = reactive({
    taskType: 'SUB_PROCESS',
    name: '',
    description: '',
    flag: 'YES',
    workerGroup: 'default',
    environmentCode: null,
    localParams: [],
    failRetryTimes: 0,
    failRetryInterval: 1,
    delayTime: 0,
    timeoutFlag: false,
    timeout: 30,
    processDefinitionCode: ''
  } as INodeData)

  // Extra form items are only shown when editing from the task-definition
  // page (from === 1): the task type selector and the process-name picker.
  const extra: IJsonItem[] =
    from === 1
      ? [
          Fields.useTaskType(model, readonly),
          Fields.useProcessName({
            model,
            projectCode,
            isCreate: !data?.id,
            from,
            processName: data?.processName
          })
        ]
      : []

  return {
    // Item order here is the render order of the form.
    json: [
      Fields.useName(),
      ...extra,
      Fields.useRunFlag(),
      Fields.useDescription(),
      Fields.useTaskPriority(),
      Fields.useWorkerGroup(),
      Fields.useEnvironmentName(model, !data?.id),
      ...Fields.useTaskGroup(model, projectCode),
      ...Fields.useTimeoutAlarm(model),
      Fields.useChildNode({
        model,
        projectCode,
        from,
        processName: data?.processName,
        code: data?.code
      }),
      Fields.usePreTasks(model, data?.code)
    ] as IJsonItem[],
    model
  }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,835 | [Bug(Serious)-FE][UI Next][V1.0.0-Alpha]Failed to save a workflow containing a child workflow node | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
Failed to save a workflow containing a child workflow node
<img width="1900" alt="image" src="https://user-images.githubusercontent.com/76080484/157829805-2aa693cf-6096-4477-af83-b437477bc27a.png">
### What you expected to happen
Successfully saved and ready to run
### How to reproduce
1、First complete a new workflow
2、Create a new workflow that is being edited
3、Drag a sub-workflow component
4、Set the sub-workflow as the first new workflow completed
5、Click Save Workflow to report an error
The front end does not pass information about the child node to the back end
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8835 | https://github.com/apache/dolphinscheduler/pull/8908 | f726730d93d52e646d6b27fbddf23fae643f868c | 9a93910ac5af8512e6f02f664995897e8e0459a5 | "2022-03-11T08:23:35Z" | java | "2022-03-15T11:01:48Z" | dolphinscheduler-ui-next/src/views/projects/task/components/node/types.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { VNode } from 'vue'
import type { SelectOption } from 'naive-ui'
import type { TaskType } from '@/views/projects/task/constants/task-type'
import type { IDataBase } from '@/service/modules/data-source/types'
import type {
IFormItem,
IJsonItem,
FormRules,
IJsonItemParams
} from '@/components/form/types'
// Language of a user-supplied main program (Spark/Flink/MR style tasks).
type ProgramType = 'JAVA' | 'SCALA' | 'PYTHON'
// Data endpoint kinds supported by the Sqoop task.
type SourceType = 'MYSQL' | 'HDFS' | 'HIVE'
// Sqoop transfer direction.
type ModelType = 'import' | 'export'
// Boolean combinator for dependent/switch conditions.
type RelationType = 'AND' | 'OR'
// Re-export of the task-type union under the local naming convention.
type ITaskType = TaskType
// Generic label/value pair for select-style form options.
interface IOption {
  label: string
  value: string | number
}
// Select option for task priority, carrying its icon and display color.
interface ITaskPriorityOption extends SelectOption {
  icon: VNode
  color: string
}
// Environment select option; workerGroups restricts which groups may use it.
interface IEnvironmentNameOption {
  label: string
  value: string
  workerGroups?: string[]
}
// One user-defined parameter row (name, IN/OUT direction, type, value).
interface ILocalParam {
  prop: string
  direct?: string
  type?: string
  value?: string
}
// Form-item description returned by the backend for dynamic forms.
interface IResponseJsonItem extends Omit<IJsonItemParams, 'type'> {
  type: 'input' | 'select' | 'radio' | 'group'
  emit: 'change'[]
}
// One dependency item of a DEPENDENT task.
// NOTE: the name carries a historical typo ("Dependpend") but is exported,
// so it must not be renamed here.
interface IDependpendItem {
  depTaskCode?: number
  status?: 'SUCCESS' | 'FAILURE'
  definitionCodeOptions?: IOption[]
  depTaskCodeOptions?: IOption[]
  dateOptions?: IOption[]
  projectCode?: number
  definitionCode?: number
  cycle?: 'month' | 'week' | 'day' | 'hour'
  dateValue?: string
}
// A group of dependency items combined with a relation, plus branch target.
interface IDependTask {
  condition?: string
  nextNode?: number
  relation?: RelationType
  dependItemList?: IDependpendItem[]
}
// Branching result of a SWITCH task: conditional branches plus default node.
interface ISwitchResult {
  dependTaskList?: IDependTask[]
  nextNode?: number
}
// Reference to a resource file by id.
interface ISourceItem {
  id: number
}
// Flattened (prefixed) form fields for the Sqoop *target* side.
interface ISqoopTargetData {
  targetHiveDatabase?: string
  targetHiveTable?: string
  targetHiveCreateTable?: boolean
  targetHiveDropDelimiter?: boolean
  targetHiveOverWrite?: boolean
  targetHiveTargetDir?: string
  targetHiveReplaceDelimiter?: string
  targetHivePartitionKey?: string
  targetHivePartitionValue?: string
  targetHdfsTargetPath?: string
  targetHdfsDeleteTargetDir?: boolean
  targetHdfsCompressionCodec?: string
  targetHdfsFileType?: string
  targetHdfsFieldsTerminated?: string
  targetHdfsLinesTerminated?: string
  targetMysqlType?: string
  targetMysqlDatasource?: string
  targetMysqlTable?: string
  targetMysqlColumns?: string
  targetMysqlFieldsTerminated?: string
  targetMysqlLinesTerminated?: string
  targetMysqlIsUpdate?: string
  targetMysqlTargetUpdateKey?: string
  targetMysqlUpdateMode?: string
}
// Flattened (prefixed) form fields for the Sqoop *source* side.
interface ISqoopSourceData {
  srcQueryType?: '1' | '0'
  srcTable?: string
  srcColumnType?: '1' | '0'
  srcColumns?: string
  sourceMysqlSrcQuerySql?: string
  sourceMysqlType?: string
  sourceMysqlDatasource?: string
  mapColumnHive?: ILocalParam[]
  mapColumnJava?: ILocalParam[]
  sourceHdfsExportDir?: string
  sourceHiveDatabase?: string
  sourceHiveTable?: string
  sourceHivePartitionKey?: string
  sourceHivePartitionValue?: string
}
// Sqoop target params in the (unprefixed) wire shape sent to the backend.
interface ISqoopTargetParams {
  hiveDatabase?: string
  hiveTable?: string
  createHiveTable?: boolean
  dropDelimiter?: boolean
  hiveOverWrite?: boolean
  hiveTargetDir?: string
  replaceDelimiter?: string
  hivePartitionKey?: string
  hivePartitionValue?: string
  targetPath?: string
  deleteTargetDir?: boolean
  compressionCodec?: string
  fileType?: string
  fieldsTerminated?: string
  linesTerminated?: string
  targetType?: string
  targetDatasource?: string
  targetTable?: string
  targetColumns?: string
  isUpdate?: string
  targetUpdateKey?: string
  targetUpdateMode?: string
}
// Sqoop source params in the (unprefixed) wire shape sent to the backend.
interface ISqoopSourceParams {
  srcTable?: string
  srcColumnType?: '1' | '0'
  srcColumns?: string
  srcQuerySql?: string
  srcQueryType?: '1' | '0'
  srcType?: string
  srcDatasource?: string
  mapColumnHive?: ILocalParam[]
  mapColumnJava?: ILocalParam[]
  exportDir?: string
  hiveDatabase?: string
  hiveTable?: string
  hivePartitionKey?: string
  hivePartitionValue?: string
}
// Spark resource settings shared by Spark-based task types.
interface ISparkParameters {
  deployMode?: string
  driverCores?: number
  driverMemory?: string
  executorCores?: number
  executorMemory?: string
  numExecutors?: number
  others?: string
}
// Input parameters of a data-quality rule (snake_case matches the backend).
interface IRuleParameters {
  check_type?: string
  comparison_execute_sql?: string
  comparison_name?: string
  failure_strategy?: string
  operator?: string
  src_connector_type?: number
  src_datasource_id?: number
  src_table?: string
  statistics_execute_sql?: string
  statistics_name?: string
  target_connector_type?: number
  target_datasource_id?: number
  target_table?: string
  threshold?: string
}
// Union of every task type's `taskParams` payload; each task type uses only
// the subset of fields relevant to it.
interface ITaskParams {
  resourceList?: ISourceItem[]
  mainJar?: ISourceItem
  localParams?: ILocalParam[]
  rawScript?: string
  programType?: string
  sparkVersion?: string
  flinkVersion?: string
  jobManagerMemory?: string
  taskManagerMemory?: string
  slot?: number
  taskManager?: number
  parallelism?: number
  mainClass?: string
  deployMode?: string
  appName?: string
  driverCores?: number
  driverMemory?: string
  numExecutors?: number
  executorMemory?: string
  executorCores?: number
  mainArgs?: string
  others?: string
  httpMethod?: string
  httpCheckCondition?: string
  httpParams?: []
  url?: string
  condition?: string
  connectTimeout?: number
  socketTimeout?: number
  type?: string
  datasource?: string
  sql?: string
  sqlType?: string
  preStatements?: string[]
  postStatements?: string[]
  method?: string
  jobType?: 'CUSTOM' | 'TEMPLATE'
  customShell?: string
  jobName?: string
  hadoopCustomParams?: ILocalParam[]
  sqoopAdvancedParams?: ILocalParam[]
  concurrency?: number
  modelType?: ModelType
  sourceType?: SourceType
  targetType?: SourceType
  targetParams?: string
  sourceParams?: string
  queue?: string
  master?: string
  switchResult?: ISwitchResult
  dependTaskList?: IDependTask[]
  nextNode?: number
  dependence?: {
    relation?: RelationType
    dependTaskList?: IDependTask[]
  }
  customConfig?: number
  json?: string
  dsType?: string
  dataSource?: number
  dtType?: string
  dataTarget?: number
  targetTable?: string
  jobSpeedByte?: number
  jobSpeedRecord?: number
  xms?: number
  xmx?: number
  sparkParameters?: ISparkParameters
  ruleId?: number
  ruleInputParameter?: IRuleParameters
  jobFlowDefineJson?: string
}
// Editor-side node model: task params flattened together with the node's own
// scheduling attributes (retry, timeout, worker group, relations, ...).
interface INodeData
  extends Omit<
      ITaskParams,
      | 'resourceList'
      | 'mainJar'
      | 'targetParams'
      | 'sourceParams'
      | 'dependence'
      | 'sparkParameters'
    >,
    ISqoopTargetData,
    ISqoopSourceData,
    IRuleParameters {
  id?: string
  taskType?: ITaskType
  processName?: number
  delayTime?: number
  description?: string
  environmentCode?: number | null
  failRetryInterval?: number
  failRetryTimes?: number
  flag?: 'YES' | 'NO'
  taskGroupId?: number
  taskGroupPriority?: number
  taskPriority?: string
  timeout?: number
  timeoutFlag?: boolean
  timeoutNotifyStrategy?: string[]
  workerGroup?: string
  code?: number
  name?: string
  preTasks?: number[]
  preTaskOptions?: []
  postTaskOptions?: []
  resourceList?: number[]
  mainJar?: number
  timeoutSetting?: boolean
  isCustomTask?: boolean
  method?: string
  masterUrl?: string
  resourceFiles?: { id: number; fullName: string }[] | null
  relation?: RelationType
  definition?: object
}
// Backend-side task shape: same as INodeData but with the wire encodings
// (e.g. timeoutFlag as 'OPEN'/'CLOSE') and a nested taskParams payload.
interface ITaskData
  extends Omit<
    INodeData,
    'timeoutFlag' | 'taskPriority' | 'timeoutNotifyStrategy'
  > {
  name?: string
  taskPriority?: string
  timeoutFlag?: 'OPEN' | 'CLOSE'
  timeoutNotifyStrategy?: string | []
  taskParams?: ITaskParams
}
export {
  ITaskPriorityOption,
  IEnvironmentNameOption,
  ILocalParam,
  ITaskType,
  ITaskData,
  INodeData,
  ITaskParams,
  IOption,
  IDataBase,
  ProgramType,
  ModelType,
  SourceType,
  ISqoopSourceParams,
  ISqoopTargetParams,
  IDependTask,
  IDependpendItem,
  IFormItem,
  IJsonItem,
  FormRules,
  IJsonItemParams,
  IResponseJsonItem
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,258 | [Feature][python] Custom log for meanful an easier log formatter | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
ATT, for now we just use logging and it should be configured in each file where we want to add a log; maybe we should add a custom logger to do it
maybe we could just add
```py
log = logging.getLogger(__name__)
```
in each place wo call `logging`
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8258 | https://github.com/apache/dolphinscheduler/pull/8901 | 9a93910ac5af8512e6f02f664995897e8e0459a5 | 32a5ccac72682b2efc1639a46d0fd6458b37216e | "2022-01-29T08:10:54Z" | java | "2022-03-15T11:16:33Z" | dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/core/task.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""DolphinScheduler Task and TaskRelation object."""
import logging
from typing import Dict, List, Optional, Sequence, Set, Tuple, Union
from pydolphinscheduler.constants import (
Delimiter,
TaskFlag,
TaskPriority,
TaskTimeoutFlag,
)
from pydolphinscheduler.core import configuration
from pydolphinscheduler.core.base import Base
from pydolphinscheduler.core.process_definition import (
ProcessDefinition,
ProcessDefinitionContext,
)
from pydolphinscheduler.java_gateway import launch_gateway
class TaskRelation(Base):
    """TaskRelation object, describe the relation of exactly two tasks."""

    # Add attr `_KEY_ATTR` to overwrite :func:`__eq__`, it is make set
    # `Task.process_definition._task_relations` work correctly.
    _KEY_ATTR = {
        "pre_task_code",
        "post_task_code",
    }

    # Attributes rendered into the relation's serialized definition.
    _DEFINE_ATTR = {
        "pre_task_code",
        "post_task_code",
    }

    # Constant fields merged into every serialized relation; versions default
    # to 1 and no condition is attached at this level.
    _DEFAULT_ATTR = {
        "name": "",
        "preTaskVersion": 1,
        "postTaskVersion": 1,
        "conditionType": 0,
        "conditionParams": {},
    }

    def __init__(
        self,
        pre_task_code: int,
        post_task_code: int,
        name: Optional[str] = None,
    ):
        """Create a relation from ``pre_task_code`` to ``post_task_code``."""
        super().__init__(name)
        self.pre_task_code = pre_task_code
        self.post_task_code = post_task_code

    def __hash__(self):
        # Hash on the "pre -> post" string so relations between the same two
        # codes dedupe when added to a set (see _KEY_ATTR for equality).
        return hash(f"{self.pre_task_code} {Delimiter.DIRECTION} {self.post_task_code}")
class Task(Base):
    """Task object, parent class for all exactly task type."""

    # Attributes rendered into the task-definition payload sent to the
    # Java gateway.
    _DEFINE_ATTR = {
        "name",
        "code",
        "version",
        "task_type",
        "task_params",
        "description",
        "flag",
        "task_priority",
        "worker_group",
        "delay_time",
        "fail_retry_times",
        "fail_retry_interval",
        "timeout_flag",
        "timeout_notify_strategy",
        "timeout",
    }

    # Extra task-param attribute names contributed by concrete subclasses;
    # merged into :attr:`task_params` (see the property below).
    _task_custom_attr: set = set()

    # Default branch routing used when no explicit condition result is given.
    DEFAULT_CONDITION_RESULT = {"successNode": [""], "failedNode": [""]}

    def __init__(
        self,
        name: str,
        task_type: str,
        description: Optional[str] = None,
        flag: Optional[str] = TaskFlag.YES,
        task_priority: Optional[str] = TaskPriority.MEDIUM,
        worker_group: Optional[str] = configuration.WORKFLOW_WORKER_GROUP,
        delay_time: Optional[int] = 0,
        fail_retry_times: Optional[int] = 0,
        fail_retry_interval: Optional[int] = 1,
        timeout_flag: Optional[int] = TaskTimeoutFlag.CLOSE,
        timeout_notify_strategy: Optional = None,
        timeout: Optional[int] = 0,
        process_definition: Optional[ProcessDefinition] = None,
        local_params: Optional[List] = None,
        resource_list: Optional[List] = None,
        dependence: Optional[Dict] = None,
        wait_start_timeout: Optional[Dict] = None,
        condition_result: Optional[Dict] = None,
    ):
        """Initialize the task and register it with its process definition."""
        super().__init__(name, description)
        self.task_type = task_type
        self.flag = flag
        self.task_priority = task_priority
        self.worker_group = worker_group
        self.fail_retry_times = fail_retry_times
        self.fail_retry_interval = fail_retry_interval
        self.delay_time = delay_time
        self.timeout_flag = timeout_flag
        self.timeout_notify_strategy = timeout_notify_strategy
        self.timeout = timeout
        self._process_definition = None
        # Fall back to the process definition currently opened as a context
        # manager (``with ProcessDefinition(...)``) when none is passed.
        self.process_definition: ProcessDefinition = (
            process_definition or ProcessDefinitionContext.get()
        )
        self._upstream_task_codes: Set[int] = set()
        self._downstream_task_codes: Set[int] = set()
        self._task_relation: Set[TaskRelation] = set()
        # move attribute code and version after _process_definition and process_definition declare
        self.code, self.version = self.gen_code_and_version()
        # Add task to process definition, maybe we could put into property process_definition latter
        if (
            self.process_definition is not None
            and self.code not in self.process_definition.tasks
        ):
            self.process_definition.add_task(self)
        else:
            # NOTE(review): this branch also fires when process_definition is
            # None, in which case the "already in process definition" message
            # is misleading — confirm whether that case should be silent.
            logging.warning(
                "Task code %d already in process definition, prohibit re-add task.",
                self.code,
            )

        # Attribute for task param
        self.local_params = local_params or []
        self.resource_list = resource_list or []
        self.dependence = dependence or {}
        self.wait_start_timeout = wait_start_timeout or {}
        self._condition_result = condition_result or self.DEFAULT_CONDITION_RESULT

    @property
    def process_definition(self) -> Optional[ProcessDefinition]:
        """Get attribute process_definition."""
        return self._process_definition

    @process_definition.setter
    def process_definition(self, process_definition: Optional[ProcessDefinition]):
        """Set attribute process_definition."""
        self._process_definition = process_definition

    @property
    def condition_result(self) -> Dict:
        """Get attribute condition_result."""
        return self._condition_result

    @condition_result.setter
    def condition_result(self, condition_result: Optional[Dict]):
        """Set attribute condition_result."""
        self._condition_result = condition_result

    @property
    def task_params(self) -> Optional[Dict]:
        """Get task parameter object.

        Will get result to combine _task_custom_attr and custom_attr.
        """
        custom_attr = {
            "local_params",
            "resource_list",
            "dependence",
            "wait_start_timeout",
            "condition_result",
        }
        # Subclasses extend the param set via _task_custom_attr.
        custom_attr |= self._task_custom_attr
        return self.get_define_custom(custom_attr=custom_attr)

    def __hash__(self):
        # Tasks are identified by their gateway-assigned code.
        return hash(self.code)

    def __lshift__(self, other: Union["Task", Sequence["Task"]]):
        """Implement Task << Task."""
        self.set_upstream(other)
        return other

    def __rshift__(self, other: Union["Task", Sequence["Task"]]):
        """Implement Task >> Task."""
        self.set_downstream(other)
        return other

    def __rrshift__(self, other: Union["Task", Sequence["Task"]]):
        """Call for Task >> [Task] because list don't have __rshift__ operators."""
        self.__lshift__(other)
        return self

    def __rlshift__(self, other: Union["Task", Sequence["Task"]]):
        """Call for Task << [Task] because list don't have __lshift__ operators."""
        self.__rshift__(other)
        return self

    def _set_deps(
        self, tasks: Union["Task", Sequence["Task"]], upstream: bool = True
    ) -> None:
        """
        Set parameter tasks dependent to current task.

        it is a wrapper for :func:`set_upstream` and :func:`set_downstream`.
        """
        if not isinstance(tasks, Sequence):
            tasks = [tasks]

        for task in tasks:
            if upstream:
                # Record the edge on both endpoints, then register the
                # relation (task -> self) with the process definition.
                self._upstream_task_codes.add(task.code)
                task._downstream_task_codes.add(self.code)

                if self._process_definition:
                    task_relation = TaskRelation(
                        pre_task_code=task.code,
                        post_task_code=self.code,
                        name=f"{task.name} {Delimiter.DIRECTION} {self.name}",
                    )
                    self.process_definition._task_relations.add(task_relation)
            else:
                # Mirror image of the upstream branch: self -> task.
                self._downstream_task_codes.add(task.code)
                task._upstream_task_codes.add(self.code)

                if self._process_definition:
                    task_relation = TaskRelation(
                        pre_task_code=self.code,
                        post_task_code=task.code,
                        name=f"{self.name} {Delimiter.DIRECTION} {task.name}",
                    )
                    self.process_definition._task_relations.add(task_relation)

    def set_upstream(self, tasks: Union["Task", Sequence["Task"]]) -> None:
        """Set parameter tasks as upstream to current task."""
        self._set_deps(tasks, upstream=True)

    def set_downstream(self, tasks: Union["Task", Sequence["Task"]]) -> None:
        """Set parameter tasks as downstream to current task."""
        self._set_deps(tasks, upstream=False)

    # TODO code should better generate in bulk mode when :ref: processDefinition run submit or start
    def gen_code_and_version(self) -> Tuple:
        """
        Generate task code and version from java gateway.

        If task name do not exists in process definition before, if will generate new code and version id
        equal to 0 by java gateway, otherwise if will return the exists code and version.
        """
        # TODO get code from specific project process definition and task name
        gateway = launch_gateway()
        result = gateway.entry_point.getCodeAndVersion(
            self.process_definition._project, self.name
        )
        # result = gateway.entry_point.genTaskCodeList(DefaultTaskCodeNum.DEFAULT)
        # gateway_result_checker(result)
        return result.get("code"), result.get("version")
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,258 | [Feature][python] Custom log for meanful an easier log formatter | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
ATT, for now we just use logging and it should be configured in each file where we want to add a log; maybe we should add a custom logger to do it
maybe we could just add
```py
log = logging.getLogger(__name__)
```
in each place we call `logging`
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8258 | https://github.com/apache/dolphinscheduler/pull/8901 | 9a93910ac5af8512e6f02f664995897e8e0459a5 | 32a5ccac72682b2efc1639a46d0fd6458b37216e | "2022-01-29T08:10:54Z" | java | "2022-03-15T11:16:33Z" | dolphinscheduler-python/pydolphinscheduler/tests/core/test_task.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Test Task class function."""
from unittest.mock import patch
import pytest
from pydolphinscheduler.core.task import Task, TaskRelation
from tests.testing.task import Task as testTask
# Module-level accumulators shared across the parametrized runs of
# ``test_task_relation_add_to_set`` below.
TEST_TASK_RELATION_SET = set()
TEST_TASK_RELATION_SIZE = 0


@pytest.mark.parametrize(
    "attr, expect",
    [
        (
            dict(),
            {
                "localParams": [],
                "resourceList": [],
                "dependence": {},
                "waitStartTimeout": {},
                "conditionResult": {"successNode": [""], "failedNode": [""]},
            },
        ),
        (
            {
                "local_params": ["foo", "bar"],
                "resource_list": ["foo", "bar"],
                "dependence": {"foo", "bar"},
                "wait_start_timeout": {"foo", "bar"},
                "condition_result": {"foo": ["bar"]},
            },
            {
                "localParams": ["foo", "bar"],
                "resourceList": ["foo", "bar"],
                "dependence": {"foo", "bar"},
                "waitStartTimeout": {"foo", "bar"},
                "conditionResult": {"foo": ["bar"]},
            },
        ),
    ],
)
def test_property_task_params(attr, expect):
    """Test class task property."""
    # Build a task from snake_case constructor kwargs and check that the
    # derived ``task_params`` payload uses the camelCase keys the Java
    # gateway expects (defaults apply when no kwargs are given).
    task = testTask(
        "test-property-task-params",
        "test-task",
        **attr,
    )
    assert expect == task.task_params
@pytest.mark.parametrize(
    "pre_code, post_code, expect",
    [
        (123, 456, hash("123 -> 456")),
        (12345678, 987654321, hash("12345678 -> 987654321")),
    ],
)
def test_task_relation_hash_func(pre_code, post_code, expect):
    """Test TaskRelation magic function :func:`__hash__`."""
    # The hash must be derived from the "pre -> post" string form of the
    # relation, matching TaskRelation.__hash__.
    task_param = TaskRelation(pre_task_code=pre_code, post_task_code=post_code)
    assert hash(task_param) == expect
@pytest.mark.parametrize(
    "pre_code, post_code, size_add",
    [
        (123, 456, 1),
        # Second identical pair: deduped by _KEY_ATTR equality, size stays.
        (123, 456, 0),
        (456, 456, 1),
        (123, 123, 1),
        (456, 123, 1),
        (0, 456, 1),
        (123, 0, 1),
    ],
)
def test_task_relation_add_to_set(pre_code, post_code, size_add):
    """Test TaskRelation with different pre_code and post_code add to set behavior.

    Here we use global variable to keep set of :class:`TaskRelation` instance and the number we expect
    of the size when we add a new task relation to exists set.
    """
    # NOTE(review): the cases above run in declaration order and share the
    # module-level set, so this test depends on pytest not reordering them.
    task_relation = TaskRelation(pre_task_code=pre_code, post_task_code=post_code)
    TEST_TASK_RELATION_SET.add(task_relation)
    # hint python interpreter use global variable instead of local's
    global TEST_TASK_RELATION_SIZE
    TEST_TASK_RELATION_SIZE += size_add
    assert len(TEST_TASK_RELATION_SET) == TEST_TASK_RELATION_SIZE
def test_task_relation_to_dict():
    """Test TaskRelation object function to_dict."""
    pre, post = 123, 456
    relation = TaskRelation(pre_task_code=pre, post_task_code=post)
    # ``get_define`` merges the two task codes with the constant defaults
    # declared in TaskRelation._DEFAULT_ATTR.
    assert relation.get_define() == {
        "name": "",
        "preTaskCode": pre,
        "postTaskCode": post,
        "preTaskVersion": 1,
        "postTaskVersion": 1,
        "conditionType": 0,
        "conditionParams": {},
    }
def test_task_get_define():
    """Test Task object function get_define."""
    code = 123
    version = 1
    name = "test_task_get_define"
    task_type = "test_task_get_define_type"
    # Full wire payload the gateway expects for a freshly-created task with
    # all-default scheduling attributes.
    expect = {
        "code": code,
        "name": name,
        "version": version,
        "description": None,
        "delayTime": 0,
        "taskType": task_type,
        "taskParams": {
            "resourceList": [],
            "localParams": [],
            "dependence": {},
            "conditionResult": {"successNode": [""], "failedNode": [""]},
            "waitStartTimeout": {},
        },
        "flag": "YES",
        "taskPriority": "MEDIUM",
        "workerGroup": "default",
        "failRetryTimes": 0,
        "failRetryInterval": 1,
        "timeoutFlag": "CLOSE",
        "timeoutNotifyStrategy": None,
        "timeout": 0,
    }
    # Patch code/version generation so no Java gateway is contacted.
    with patch(
        "pydolphinscheduler.core.task.Task.gen_code_and_version",
        return_value=(code, version),
    ):
        task = Task(name=name, task_type=task_type)
        assert task.get_define() == expect
@pytest.mark.parametrize("shift", ["<<", ">>"])
def test_two_tasks_shift(shift: str):
    """Test bit operator between tasks.

    Here we test both `>>` and `<<` bit operator.
    """
    upstream = testTask(name="upstream", task_type=shift)
    downstream = testTask(name="downstream", task_type=shift)
    if shift == "<<":
        downstream << upstream
    elif shift == ">>":
        upstream >> downstream
    else:
        assert False, f"Unexpect bit operator type {shift}."
    # Both directions must be recorded: each task holds the other's code.
    assert (
        1 == len(upstream._downstream_task_codes)
        and downstream.code in upstream._downstream_task_codes
    ), "Task downstream task attributes error, downstream codes size or specific code failed."
    assert (
        1 == len(downstream._upstream_task_codes)
        and upstream.code in downstream._upstream_task_codes
    ), "Task upstream task attributes error, upstream codes size or upstream code failed."
@pytest.mark.parametrize(
    "dep_expr, flag",
    [
        ("task << tasks", "upstream"),
        ("tasks << task", "downstream"),
        ("task >> tasks", "downstream"),
        ("tasks >> task", "upstream"),
    ],
)
def test_tasks_list_shift(dep_expr: str, flag: str):
    """Test bit operator between task and sequence of tasks.

    Here we test both `>>` and `<<` bit operator.
    """
    reverse_dict = {
        "upstream": "downstream",
        "downstream": "upstream",
    }
    task_type = "dep_task_and_tasks"
    task = testTask(name="upstream", task_type=task_type)
    tasks = [
        testTask(name="downstream1", task_type=task_type),
        testTask(name="downstream2", task_type=task_type),
    ]

    # Use build-in function eval to simply test case and reduce duplicate code
    eval(dep_expr)
    # ``flag`` names the relation seen from ``task``; the tasks in the list
    # must each record the reverse relation back to ``task``.
    direction_attr = f"_{flag}_task_codes"
    reverse_direction_attr = f"_{reverse_dict[flag]}_task_codes"
    assert 2 == len(getattr(task, direction_attr))
    assert [t.code in getattr(task, direction_attr) for t in tasks]
    assert all([1 == len(getattr(t, reverse_direction_attr)) for t in tasks])
    assert all([task.code in getattr(t, reverse_direction_attr) for t in tasks])
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,258 | [Feature][python] Custom log for meanful an easier log formatter | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
ATT, for now we just use logging and it should be configured in each file where we want to add a log; maybe we should add a custom logger to do it
maybe we could just add
```py
log = logging.getLogger(__name__)
```
in each place we call `logging`
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8258 | https://github.com/apache/dolphinscheduler/pull/8901 | 9a93910ac5af8512e6f02f664995897e8e0459a5 | 32a5ccac72682b2efc1639a46d0fd6458b37216e | "2022-01-29T08:10:54Z" | java | "2022-03-15T11:16:33Z" | dolphinscheduler-python/pydolphinscheduler/tests/testing/task.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Mock class Task for other test."""
import uuid
from pydolphinscheduler.core.task import Task as SourceTask
class Task(SourceTask):
    """Testing double for :class:`pydolphinscheduler.core.task.Task`.

    Replaces the Java-gateway round trip with a purely local
    implementation so unit tests never need a running backend.
    """

    # Version reported for every mocked task.
    DEFAULT_VERSION = 1

    def gen_code_and_version(self):
        """Return a locally generated ``(code, version)`` pair.

        The uuid1 timestamp serves as a unique task code instead of the
        value normally fetched from the Java gateway, keeping tests
        hermetic and deterministic in structure.
        """
        code = uuid.uuid1().time
        return code, self.DEFAULT_VERSION
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,808 | [Bug-FE][UI Next][V1.0.0-Alpha]Task instance logs are not automatically updated | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
Task instance logs are not automatically updated
<img width="1917" alt="image" src="https://user-images.githubusercontent.com/76080484/157622048-000faeae-86d3-469c-98fd-d3ae26f4aa9c.png">
### What you expected to happen
Task instance logs can be updated automatically
### How to reproduce
1. Execute a long workflow
2. View task instance logs
3. The log is not updated. You need to close the log window and open it again to update the latest log
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8808 | https://github.com/apache/dolphinscheduler/pull/8912 | 32a5ccac72682b2efc1639a46d0fd6458b37216e | bde7d52c5d8322ebc6897771cc10725aefb19e9b | "2022-03-10T08:42:36Z" | java | "2022-03-15T12:58:10Z" | dolphinscheduler-ui-next/src/components/modal/index.tsx | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { defineComponent, PropType, renderSlot, Ref } from 'vue'
import { NModal, NCard, NButton, NSpace } from 'naive-ui'
import ButtonLink from '@/components/button-link'
import { useI18n } from 'vue-i18n'
import styles from './index.module.scss'
import { LinkOption } from '@/components/modal/types'
// Prop contract for the shared Modal component.
const props = {
  // Visibility flag, used with v-model:show from the parent.
  show: {
    type: Boolean as PropType<boolean>,
    default: false
  },
  title: {
    type: String as PropType<string>,
    required: true
  },
  cancelText: {
    type: String as PropType<string>
  },
  cancelShow: {
    type: Boolean as PropType<boolean>,
    default: true
  },
  confirmText: {
    type: String as PropType<string>
  },
  confirmClassName: {
    type: String as PropType<string>,
    default: ''
  },
  cancelClassName: {
    type: String as PropType<string>,
    default: ''
  },
  confirmDisabled: {
    type: Boolean as PropType<boolean>,
    default: false
  },
  confirmLoading: {
    type: Boolean as PropType<boolean>,
    default: false
  },
  autoFocus: {
    type: Boolean as PropType<boolean>,
    default: true
  },
  // Extra action links rendered in the card header (e.g. refresh/download).
  headerLinks: {
    type: Object as PropType<Ref<Array<LinkOption>>>,
    // Vue requires object/array prop defaults to come from a factory
    // function; the previous bare `[] as LinkOption[]` triggered a runtime
    // warning and shared one array across all component instances.
    // Behavior is otherwise unchanged: the render guard on `.value`
    // still short-circuits for the default value.
    default: () => [] as LinkOption[]
  }
}
// Generic confirm/cancel modal: wraps Naive UI's NModal + NCard.
// Body content comes from the default slot; optional action links
// (passed via `headerLinks`) are rendered in the card header.
const Modal = defineComponent({
  name: 'Modal',
  props,
  // NOTE(review): 'jumpLink' is declared but never emitted in this
  // component — presumably reserved for consumers; confirm before removing.
  emits: ['cancel', 'confirm', 'jumpLink'],
  setup(props, ctx) {
    const { t } = useI18n()
    // Re-emit button clicks so the parent owns the open/close state.
    const onCancel = () => {
      ctx.emit('cancel')
    }
    const onConfirm = () => {
      ctx.emit('confirm')
    }
    return { t, onCancel, onConfirm }
  },
  render() {
    const { $slots, t, onCancel, onConfirm, confirmDisabled, confirmLoading } =
      this
    return (
      <NModal
        v-model={[this.show, 'show']}
        class={styles.container}
        mask-closable={false}
        auto-focus={this.autoFocus}
      >
        <NCard
          title={this.title}
          class={styles['modal-card']}
          contentStyle={{ overflowY: 'auto' }}
        >
          {{
            default: () => renderSlot($slots, 'default'),
            'header-extra': () => (
              <NSpace justify='end'>
                {this.headerLinks.value &&
                  this.headerLinks.value
                    .filter((item: any) => item.show)
                    .map((item: any) => {
                      return (
                        <ButtonLink onClick={item.action}>
                          {{
                            default: () => item.text,
                            icon: () => item.icon()
                          }}
                        </ButtonLink>
                      )
                    })}
              </NSpace>
            ),
            footer: () => (
              <NSpace justify='end'>
                {this.cancelShow && (
                  <NButton
                    class={this.cancelClassName}
                    quaternary
                    size='small'
                    onClick={onCancel}
                  >
                    {this.cancelText || t('modal.cancel')}
                  </NButton>
                )}
                {/* TODO: Add left and right slots later */}
                {renderSlot($slots, 'btn-middle')}
                <NButton
                  class={this.confirmClassName}
                  type='info'
                  size='small'
                  onClick={onConfirm}
                  disabled={confirmDisabled}
                  loading={confirmLoading}
                >
                  {this.confirmText || t('modal.confirm')}
                </NButton>
              </NSpace>
            )
          }}
        </NCard>
      </NModal>
    )
  }
})
export default Modal
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,808 | [Bug-FE][UI Next][V1.0.0-Alpha]Task instance logs are not automatically updated | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
Task instance logs are not automatically updated
<img width="1917" alt="image" src="https://user-images.githubusercontent.com/76080484/157622048-000faeae-86d3-469c-98fd-d3ae26f4aa9c.png">
### What you expected to happen
Task instance logs can be updated automatically
### How to reproduce
1. Execute a long workflow
2. View task instance logs
3. The log is not updated. You need to close the log window and open it again to update the latest log
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8808 | https://github.com/apache/dolphinscheduler/pull/8912 | 32a5ccac72682b2efc1639a46d0fd6458b37216e | bde7d52c5d8322ebc6897771cc10725aefb19e9b | "2022-03-10T08:42:36Z" | java | "2022-03-15T12:58:10Z" | dolphinscheduler-ui-next/src/locales/modules/en_US.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Strings for the login page.
const login = {
  test: 'Test',
  userName: 'Username',
  userName_tips: 'Please enter your username',
  userPassword: 'Password',
  userPassword_tips: 'Please enter your password',
  login: 'Login'
}
// Shared modal footer button labels.
const modal = {
  cancel: 'Cancel',
  confirm: 'Confirm'
}
// Theme switcher labels.
const theme = {
  light: 'Light',
  dark: 'Dark'
}
// Entries of the user avatar dropdown menu.
const userDropdown = {
  profile: 'Profile',
  password: 'Password',
  logout: 'Logout'
}
// Labels for the main navigation sidebar / top menu.
const menu = {
  home: 'Home',
  project: 'Project',
  resources: 'Resources',
  datasource: 'Datasource',
  monitor: 'Monitor',
  security: 'Security',
  project_overview: 'Project Overview',
  workflow_relation: 'Workflow Relation',
  workflow: 'Workflow',
  workflow_definition: 'Workflow Definition',
  workflow_instance: 'Workflow Instance',
  task: 'Task',
  task_instance: 'Task Instance',
  task_definition: 'Task Definition',
  file_manage: 'File Manage',
  udf_manage: 'UDF Manage',
  resource_manage: 'Resource Manage',
  function_manage: 'Function Manage',
  service_manage: 'Service Manage',
  master: 'Master',
  worker: 'Worker',
  db: 'DB',
  statistical_manage: 'Statistical Manage',
  statistics: 'Statistics',
  audit_log: 'Audit Log',
  tenant_manage: 'Tenant Manage',
  user_manage: 'User Manage',
  alarm_group_manage: 'Alarm Group Manage',
  alarm_instance_manage: 'Alarm Instance Manage',
  worker_group_manage: 'Worker Group Manage',
  yarn_queue_manage: 'Yarn Queue Manage',
  environment_manage: 'Environment Manage',
  k8s_namespace_manage: 'K8S Namespace Manage',
  token_manage: 'Token Manage',
  task_group_manage: 'Task Group Manage',
  task_group_option: 'Task Group Option',
  task_group_queue: 'Task Group Queue',
  data_quality: 'Data Quality',
  task_result: 'Task Result',
  rule: 'Rule management'
}
// Home-page dashboard strings; state keys mirror backend ExecutionStatus names.
const home = {
  task_state_statistics: 'Task State Statistics',
  process_state_statistics: 'Process State Statistics',
  process_definition_statistics: 'Process Definition Statistics',
  number: 'Number',
  state: 'State',
  submitted_success: 'SUBMITTED_SUCCESS',
  running_execution: 'RUNNING_EXECUTION',
  ready_pause: 'READY_PAUSE',
  pause: 'PAUSE',
  ready_stop: 'READY_STOP',
  stop: 'STOP',
  failure: 'FAILURE',
  success: 'SUCCESS',
  need_fault_tolerance: 'NEED_FAULT_TOLERANCE',
  kill: 'KILL',
  waiting_thread: 'WAITING_THREAD',
  waiting_depend: 'WAITING_DEPEND',
  delay_execution: 'DELAY_EXECUTION',
  forced_success: 'FORCED_SUCCESS',
  serial_wait: 'SERIAL_WAIT'
}
// Strings for the change-password dialog.
const password = {
  edit_password: 'Edit Password',
  password: 'Password',
  confirm_password: 'Confirm Password',
  password_tips: 'Please enter your password',
  confirm_password_tips: 'Please enter your confirm password',
  two_password_entries_are_inconsistent:
    'Two password entries are inconsistent',
  submit: 'Submit'
}
// Strings for the user profile page and its edit form.
const profile = {
  profile: 'Profile',
  edit: 'Edit',
  username: 'Username',
  email: 'Email',
  phone: 'Phone',
  state: 'State',
  permission: 'Permission',
  create_time: 'Create Time',
  update_time: 'Update Time',
  administrator: 'Administrator',
  ordinary_user: 'Ordinary User',
  edit_profile: 'Edit Profile',
  username_tips: 'Please enter your username',
  email_tips: 'Please enter your email',
  email_correct_tips: 'Please enter your email in the correct format',
  phone_tips: 'Please enter your phone',
  state_tips: 'Please choose your state',
  enable: 'Enable',
  disable: 'Disable',
  timezone_success: 'Time zone updated successful',
  please_select_timezone: 'Choose timeZone'
}
// Strings for the Monitor module (master/worker/db health and audit log).
const monitor = {
  // Master server monitoring page.
  master: {
    cpu_usage: 'CPU Usage',
    memory_usage: 'Memory Usage',
    load_average: 'Load Average',
    create_time: 'Create Time',
    last_heartbeat_time: 'Last Heartbeat Time',
    directory_detail: 'Directory Detail',
    host: 'Host',
    directory: 'Directory'
  },
  // Worker server monitoring page.
  worker: {
    cpu_usage: 'CPU Usage',
    memory_usage: 'Memory Usage',
    load_average: 'Load Average',
    create_time: 'Create Time',
    last_heartbeat_time: 'Last Heartbeat Time',
    directory_detail: 'Directory Detail',
    host: 'Host',
    directory: 'Directory'
  },
  // Database health page.
  db: {
    health_state: 'Health State',
    max_connections: 'Max Connections',
    threads_connections: 'Threads Connections',
    threads_running_connections: 'Threads Running Connections'
  },
  // Command/task queue statistics page.
  statistics: {
    command_number_of_waiting_for_running:
      'Command Number Of Waiting For Running',
    failure_command_number: 'Failure Command Number',
    tasks_number_of_waiting_running: 'Tasks Number Of Waiting Running',
    task_number_of_ready_to_kill: 'Task Number Of Ready To Kill'
  },
  // Audit log page.
  audit_log: {
    user_name: 'User Name',
    resource_type: 'Resource Type',
    project_name: 'Project Name',
    operation_type: 'Operation Type',
    create_time: 'Create Time',
    start_time: 'Start Time',
    end_time: 'End Time',
    user_audit: 'User Audit',
    project_audit: 'Project Audit',
    create: 'Create',
    update: 'Update',
    delete: 'Delete',
    read: 'Read'
  }
}
// Strings for the Resources module (files, UDFs, task groups).
const resource = {
  // File manage page.
  file: {
    file_manage: 'File Manage',
    create_folder: 'Create Folder',
    create_file: 'Create File',
    upload_files: 'Upload Files',
    enter_keyword_tips: 'Please enter keyword',
    name: 'Name',
    user_name: 'Resource userName',
    whether_directory: 'Whether directory',
    file_name: 'File Name',
    description: 'Description',
    size: 'Size',
    update_time: 'Update Time',
    operation: 'Operation',
    edit: 'Edit',
    rename: 'Rename',
    download: 'Download',
    delete: 'Delete',
    yes: 'Yes',
    no: 'No',
    folder_name: 'Folder Name',
    enter_name_tips: 'Please enter name',
    enter_description_tips: 'Please enter description',
    enter_content_tips: 'Please enter the resource content',
    file_format: 'File Format',
    file_content: 'File Content',
    delete_confirm: 'Delete?',
    confirm: 'Confirm',
    cancel: 'Cancel',
    success: 'Success',
    file_details: 'File Details',
    return: 'Return',
    save: 'Save'
  },
  // UDF resources page.
  udf: {
    udf_resources: 'UDF resources',
    create_folder: 'Create Folder',
    upload_udf_resources: 'Upload UDF Resources',
    udf_source_name: 'UDF Resource Name',
    whether_directory: 'Whether directory',
    file_name: 'File Name',
    file_size: 'File Size',
    description: 'Description',
    create_time: 'Create Time',
    update_time: 'Update Time',
    operation: 'Operation',
    yes: 'Yes',
    no: 'No',
    edit: 'Edit',
    download: 'Download',
    delete: 'Delete',
    delete_confirm: 'Delete?',
    success: 'Success',
    folder_name: 'Folder Name',
    upload: 'Upload',
    upload_files: 'Upload Files',
    file_upload: 'File Upload',
    enter_keyword_tips: 'Please enter keyword',
    enter_name_tips: 'Please enter name',
    enter_description_tips: 'Please enter description'
  },
  // UDF function page.
  function: {
    udf_function: 'UDF Function',
    create_udf_function: 'Create UDF Function',
    // Bug fix: previously duplicated the "Create" label via copy-paste.
    edit_udf_function: 'Edit UDF Function',
    udf_function_name: 'UDF Function Name',
    class_name: 'Class Name',
    type: 'Type',
    description: 'Description',
    jar_package: 'Jar Package',
    update_time: 'Update Time',
    operation: 'Operation',
    rename: 'Rename',
    edit: 'Edit',
    delete: 'Delete',
    success: 'Success',
    package_name: 'Package Name',
    udf_resources: 'UDF Resources',
    instructions: 'Instructions',
    upload_resources: 'Upload Resources',
    udf_resources_directory: 'UDF resources directory',
    delete_confirm: 'Delete?',
    enter_keyword_tips: 'Please enter keyword',
    // NOTE(review): key name has a typo ("unction") but is referenced from
    // callers by this exact key — renaming would break lookups; confirm
    // all usages before fixing the key itself.
    enter_udf_unction_name_tips: 'Please enter a UDF function name',
    enter_package_name_tips: 'Please enter a Package name',
    enter_select_udf_resources_tips: 'Please select UDF resources',
    enter_select_udf_resources_directory_tips:
      'Please select UDF resources directory',
    enter_instructions_tips: 'Please enter a instructions',
    enter_name_tips: 'Please enter name',
    enter_description_tips: 'Please enter description'
  },
  // Task group option page.
  task_group_option: {
    manage: 'Task group manage',
    option: 'Task group option',
    create: 'Create task group',
    edit: 'Edit task group',
    delete: 'Delete task group',
    view_queue: 'View the queue of the task group',
    switch_status: 'Switch status',
    code: 'Task group code',
    name: 'Task group name',
    project_name: 'Project name',
    resource_pool_size: 'Resource pool size',
    resource_pool_size_be_a_number:
      'The size of the task group resource pool should be more than 1',
    resource_used_pool_size: 'Used resource',
    desc: 'Task group desc',
    status: 'Task group status',
    enable_status: 'Enable',
    disable_status: 'Disable',
    please_enter_name: 'Please enter task group name',
    please_enter_desc: 'Please enter task group description',
    please_enter_resource_pool_size:
      'Please enter task group resource pool size',
    please_select_project: 'Please select a project',
    create_time: 'Create time',
    update_time: 'Update time',
    actions: 'Actions',
    please_enter_keywords: 'Please enter keywords'
  },
  // Task group queue page.
  task_group_queue: {
    actions: 'Actions',
    task_name: 'Task name',
    task_group_name: 'Task group name',
    project_name: 'Project name',
    process_name: 'Process name',
    process_instance_name: 'Process instance',
    queue: 'Task group queue',
    priority: 'Priority',
    priority_be_a_number:
      'The priority of the task group queue should be a positive number',
    force_starting_status: 'Starting status',
    in_queue: 'In queue',
    task_status: 'Task status',
    view: 'View task group queue',
    the_status_of_waiting: 'Waiting into the queue',
    the_status_of_queuing: 'Queuing',
    the_status_of_releasing: 'Released',
    modify_priority: 'Edit the priority',
    start_task: 'Start the task',
    priority_not_empty: 'The value of priority can not be empty',
    priority_must_be_number: 'The value of priority should be number',
    please_select_task_name: 'Please select a task name',
    create_time: 'Create time',
    update_time: 'Update time',
    edit_priority: 'Edit the task priority'
  }
}
// Strings for the Project module: project list, workflow definition/instance
// pages, task pages, the DAG editor, and the task-node configuration forms.
const project = {
  // Project list page.
  list: {
    create_project: 'Create Project',
    edit_project: 'Edit Project',
    project_list: 'Project List',
    project_tips: 'Please enter your project',
    description_tips: 'Please enter your description',
    username_tips: 'Please enter your username',
    project_name: 'Project Name',
    project_description: 'Project Description',
    owned_users: 'Owned Users',
    workflow_define_count: 'Workflow Define Count',
    process_instance_running_count: 'Process Instance Running Count',
    description: 'Description',
    create_time: 'Create Time',
    update_time: 'Update Time',
    operation: 'Operation',
    edit: 'Edit',
    delete: 'Delete',
    confirm: 'Confirm',
    cancel: 'Cancel',
    delete_confirm: 'Delete?'
  },
  // Workflow definition and instance pages (toolbar, table columns, dialogs).
  workflow: {
    workflow_relation: 'Workflow Relation',
    create_workflow: 'Create Workflow',
    import_workflow: 'Import Workflow',
    workflow_name: 'Workflow Name',
    current_selection: 'Current Selection',
    online: 'Online',
    offline: 'Offline',
    refresh: 'Refresh',
    show_hide_label: 'Show / Hide Label',
    workflow_offline: 'Workflow Offline',
    schedule_offline: 'Schedule Offline',
    schedule_start_time: 'Schedule Start Time',
    schedule_end_time: 'Schedule End Time',
    crontab_expression: 'Crontab',
    workflow_publish_status: 'Workflow Publish Status',
    schedule_publish_status: 'Schedule Publish Status',
    workflow_definition: 'Workflow Definition',
    workflow_instance: 'Workflow Instance',
    status: 'Status',
    create_time: 'Create Time',
    update_time: 'Update Time',
    description: 'Description',
    create_user: 'Create User',
    modify_user: 'Modify User',
    operation: 'Operation',
    edit: 'Edit',
    start: 'Start',
    timing: 'Timing',
    timezone: 'Timezone',
    up_line: 'Online',
    down_line: 'Offline',
    copy_workflow: 'Copy Workflow',
    cron_manage: 'Cron manage',
    delete: 'Delete',
    tree_view: 'Tree View',
    tree_limit: 'Limit Size',
    export: 'Export',
    version_info: 'Version Info',
    version: 'Version',
    file_upload: 'File Upload',
    upload_file: 'Upload File',
    upload: 'Upload',
    file_name: 'File Name',
    success: 'Success',
    set_parameters_before_starting: 'Please set the parameters before starting',
    set_parameters_before_timing: 'Set parameters before timing',
    start_and_stop_time: 'Start and stop time',
    next_five_execution_times: 'Next five execution times',
    execute_time: 'Execute time',
    failure_strategy: 'Failure Strategy',
    notification_strategy: 'Notification Strategy',
    workflow_priority: 'Workflow Priority',
    worker_group: 'Worker Group',
    environment_name: 'Environment Name',
    alarm_group: 'Alarm Group',
    complement_data: 'Complement Data',
    startup_parameter: 'Startup Parameter',
    whether_dry_run: 'Whether Dry-Run',
    continue: 'Continue',
    end: 'End',
    none_send: 'None',
    success_send: 'Success',
    failure_send: 'Failure',
    all_send: 'All',
    whether_complement_data: 'Whether it is a complement process?',
    schedule_date: 'Schedule date',
    mode_of_execution: 'Mode of execution',
    serial_execution: 'Serial execution',
    parallel_execution: 'Parallel execution',
    parallelism: 'Parallelism',
    custom_parallelism: 'Custom Parallelism',
    please_enter_parallelism: 'Please enter Parallelism',
    please_choose: 'Please Choose',
    start_time: 'Start Time',
    end_time: 'End Time',
    crontab: 'Crontab',
    delete_confirm: 'Delete?',
    enter_name_tips: 'Please enter name',
    switch_version: 'Switch To This Version',
    confirm_switch_version: 'Confirm Switch To This Version?',
    current_version: 'Current Version',
    run_type: 'Run Type',
    scheduling_time: 'Scheduling Time',
    duration: 'Duration',
    run_times: 'Run Times',
    fault_tolerant_sign: 'Fault-tolerant Sign',
    dry_run_flag: 'Dry-run Flag',
    executor: 'Executor',
    host: 'Host',
    start_process: 'Start Process',
    execute_from_the_current_node: 'Execute from the current node',
    recover_tolerance_fault_process: 'Recover tolerance fault process',
    resume_the_suspension_process: 'Resume the suspension process',
    execute_from_the_failed_nodes: 'Execute from the failed nodes',
    scheduling_execution: 'Scheduling execution',
    rerun: 'Rerun',
    stop: 'Stop',
    pause: 'Pause',
    recovery_waiting_thread: 'Recovery waiting thread',
    recover_serial_wait: 'Recover serial wait',
    recovery_suspend: 'Recovery Suspend',
    recovery_failed: 'Recovery Failed',
    gantt: 'Gantt',
    name: 'Name',
    all_status: 'AllStatus',
    submit_success: 'Submitted successfully',
    running: 'Running',
    ready_to_pause: 'Ready to pause',
    ready_to_stop: 'Ready to stop',
    failed: 'Failed',
    need_fault_tolerance: 'Need fault tolerance',
    kill: 'Kill',
    waiting_for_thread: 'Waiting for thread',
    waiting_for_dependence: 'Waiting for dependence',
    waiting_for_dependency_to_complete: 'Waiting for dependency to complete',
    delay_execution: 'Delay execution',
    forced_success: 'Forced success',
    serial_wait: 'Serial wait',
    executing: 'Executing',
    startup_type: 'Startup Type',
    complement_range: 'Complement Range',
    parameters_variables: 'Parameters variables',
    global_parameters: 'Global parameters',
    local_parameters: 'Local parameters',
    type: 'Type',
    retry_count: 'Retry Count',
    submit_time: 'Submit Time',
    refresh_status_succeeded: 'Refresh status succeeded',
    view_log: 'View log',
    update_log_success: 'Update log success',
    no_more_log: 'No more logs',
    no_log: 'No log',
    loading_log: 'Loading Log...',
    close: 'Close',
    download_log: 'Download Log',
    refresh_log: 'Refresh Log',
    enter_full_screen: 'Enter full screen',
    cancel_full_screen: 'Cancel full screen',
    task_state: 'Task status',
    mode_of_dependent: 'Mode of dependent',
    open: 'Open'
  },
  // Task definition and instance pages.
  task: {
    task_name: 'Task Name',
    task_type: 'Task Type',
    create_task: 'Create Task',
    workflow_instance: 'Workflow Instance',
    workflow_name: 'Workflow Name',
    workflow_name_tips: 'Please select workflow name',
    workflow_state: 'Workflow State',
    version: 'Version',
    current_version: 'Current Version',
    switch_version: 'Switch To This Version',
    confirm_switch_version: 'Confirm Switch To This Version?',
    description: 'Description',
    move: 'Move',
    upstream_tasks: 'Upstream Tasks',
    executor: 'Executor',
    node_type: 'Node Type',
    state: 'State',
    submit_time: 'Submit Time',
    start_time: 'Start Time',
    create_time: 'Create Time',
    update_time: 'Update Time',
    end_time: 'End Time',
    duration: 'Duration',
    retry_count: 'Retry Count',
    dry_run_flag: 'Dry Run Flag',
    host: 'Host',
    operation: 'Operation',
    edit: 'Edit',
    delete: 'Delete',
    delete_confirm: 'Delete?',
    submitted_success: 'Submitted Success',
    running_execution: 'Running Execution',
    ready_pause: 'Ready Pause',
    pause: 'Pause',
    ready_stop: 'Ready Stop',
    stop: 'Stop',
    failure: 'Failure',
    success: 'Success',
    need_fault_tolerance: 'Need Fault Tolerance',
    kill: 'Kill',
    waiting_thread: 'Waiting Thread',
    waiting_depend: 'Waiting Depend',
    delay_execution: 'Delay Execution',
    forced_success: 'Forced Success',
    view_log: 'View Log',
    download_log: 'Download Log'
  },
  // DAG editor toolbar and save dialog.
  dag: {
    create: 'Create Workflow',
    search: 'Search',
    download_png: 'Download PNG',
    fullscreen_open: 'Open Fullscreen',
    fullscreen_close: 'Close Fullscreen',
    save: 'Save',
    close: 'Close',
    format: 'Format',
    refresh_dag_status: 'Refresh DAG status',
    layout_type: 'Layout Type',
    grid_layout: 'Grid',
    dagre_layout: 'Dagre',
    rows: 'Rows',
    cols: 'Cols',
    copy_success: 'Copy Success',
    workflow_name: 'Workflow Name',
    description: 'Description',
    tenant: 'Tenant',
    timeout_alert: 'Timeout Alert',
    global_variables: 'Global Variables',
    basic_info: 'Basic Information',
    minute: 'Minute',
    key: 'Key',
    value: 'Value',
    success: 'Success',
    delete_cell: 'Delete selected edges and nodes',
    online_directly: 'Whether to go online the process definition',
    dag_name_empty: 'DAG graph name cannot be empty',
    positive_integer: 'Please enter a positive integer greater than 0',
    prop_empty: 'prop is empty',
    prop_repeat: 'prop is repeat',
    node_not_created: 'Failed to save node not created',
    copy_name: 'Copy Name',
    view_variables: 'View Variables',
    startup_parameter: 'Startup Parameter'
  },
  // Task node configuration forms (all task types: shell, spark, flink,
  // http, sql, sqoop, datax, dependent, data quality, EMR, ...).
  node: {
    current_node_settings: 'Current node settings',
    instructions: 'Instructions',
    view_history: 'View history',
    view_log: 'View log',
    enter_this_child_node: 'Enter this child node',
    name: 'Node name',
    name_tips: 'Please enter name (required)',
    task_type: 'Task Type',
    task_type_tips: 'Please select a task type (required)',
    process_name: 'Process Name',
    process_name_tips: 'Please select a process (required)',
    child_node: 'Child Node',
    enter_child_node: 'Enter child node',
    run_flag: 'Run flag',
    normal: 'Normal',
    prohibition_execution: 'Prohibition execution',
    description: 'Description',
    description_tips: 'Please enter description',
    task_priority: 'Task priority',
    worker_group: 'Worker group',
    worker_group_tips:
      'The Worker group no longer exists, please select the correct Worker group!',
    environment_name: 'Environment Name',
    task_group_name: 'Task group name',
    task_group_queue_priority: 'Priority',
    number_of_failed_retries: 'Number of failed retries',
    times: 'Times',
    failed_retry_interval: 'Failed retry interval',
    minute: 'Minute',
    delay_execution_time: 'Delay execution time',
    state: 'State',
    branch_flow: 'Branch flow',
    cancel: 'Cancel',
    loading: 'Loading...',
    confirm: 'Confirm',
    success: 'Success',
    failed: 'Failed',
    backfill_tips:
      'The newly created sub-Process has not yet been executed and cannot enter the sub-Process',
    task_instance_tips:
      'The task has not been executed and cannot enter the sub-Process',
    branch_tips:
      'Cannot select the same node for successful branch flow and failed branch flow',
    timeout_alarm: 'Timeout alarm',
    timeout_strategy: 'Timeout strategy',
    timeout_strategy_tips: 'Timeout strategy must be selected',
    timeout_failure: 'Timeout failure',
    timeout_period: 'Timeout period',
    timeout_period_tips: 'Timeout must be a positive integer',
    script: 'Script',
    script_tips: 'Please enter script(required)',
    resources: 'Resources',
    resources_tips: 'Please select resources',
    non_resources_tips: 'Please delete all non-existent resources',
    useless_resources_tips: 'Unauthorized or deleted resources',
    custom_parameters: 'Custom Parameters',
    copy_success: 'Copy success',
    copy_failed: 'The browser does not support automatic copying',
    prop_tips: 'prop(required)',
    prop_repeat: 'prop is repeat',
    value_tips: 'value(optional)',
    value_required_tips: 'value(required)',
    pre_tasks: 'Pre tasks',
    program_type: 'Program Type',
    spark_version: 'Spark Version',
    main_class: 'Main Class',
    main_class_tips: 'Please enter main class',
    main_package: 'Main Package',
    main_package_tips: 'Please enter main package',
    deploy_mode: 'Deploy Mode',
    app_name: 'App Name',
    app_name_tips: 'Please enter app name(optional)',
    driver_cores: 'Driver Cores',
    driver_cores_tips: 'Please enter Driver cores',
    driver_memory: 'Driver Memory',
    driver_memory_tips: 'Please enter Driver memory',
    executor_number: 'Executor Number',
    executor_number_tips: 'Please enter Executor number',
    executor_memory: 'Executor Memory',
    executor_memory_tips: 'Please enter Executor memory',
    executor_cores: 'Executor Cores',
    executor_cores_tips: 'Please enter Executor cores',
    main_arguments: 'Main Arguments',
    main_arguments_tips: 'Please enter main arguments',
    option_parameters: 'Option Parameters',
    option_parameters_tips: 'Please enter option parameters',
    positive_integer_tips: 'should be a positive integer',
    flink_version: 'Flink Version',
    job_manager_memory: 'JobManager Memory',
    job_manager_memory_tips: 'Please enter JobManager memory',
    task_manager_memory: 'TaskManager Memory',
    task_manager_memory_tips: 'Please enter TaskManager memory',
    slot_number: 'Slot Number',
    slot_number_tips: 'Please enter Slot number',
    parallelism: 'Parallelism',
    custom_parallelism: 'Configure parallelism',
    parallelism_tips: 'Please enter Parallelism',
    parallelism_number_tips: 'Parallelism number should be positive integer',
    parallelism_complement_tips:
      'If there are a large number of tasks requiring complement, you can use the custom parallelism to ' +
      'set the complement task thread to a reasonable value to avoid too large impact on the server.',
    task_manager_number: 'TaskManager Number',
    task_manager_number_tips: 'Please enter TaskManager number',
    http_url: 'Http Url',
    http_url_tips: 'Please Enter Http Url',
    http_method: 'Http Method',
    http_parameters: 'Http Parameters',
    http_check_condition: 'Http Check Condition',
    http_condition: 'Http Condition',
    http_condition_tips: 'Please Enter Http Condition',
    timeout_settings: 'Timeout Settings',
    connect_timeout: 'Connect Timeout',
    ms: 'ms',
    socket_timeout: 'Socket Timeout',
    status_code_default: 'Default response code 200',
    status_code_custom: 'Custom response code',
    body_contains: 'Content includes',
    body_not_contains: 'Content does not contain',
    http_parameters_position: 'Http Parameters Position',
    target_task_name: 'Target Task Name',
    target_task_name_tips: 'Please enter the Pigeon task name',
    datasource_type: 'Datasource types',
    datasource_instances: 'Datasource instances',
    sql_type: 'SQL Type',
    sql_type_query: 'Query',
    sql_type_non_query: 'Non Query',
    sql_statement: 'SQL Statement',
    pre_sql_statement: 'Pre SQL Statement',
    post_sql_statement: 'Post SQL Statement',
    sql_input_placeholder: 'Please enter non-query sql.',
    sql_empty_tips: 'The sql can not be empty.',
    procedure_method: 'SQL Statement',
    procedure_method_tips: 'Please enter the procedure script',
    procedure_method_snippet:
      '--Please enter the procedure script \n\n--call procedure:call <procedure-name>[(<arg1>,<arg2>, ...)]\n\n--call function:?= call <procedure-name>[(<arg1>,<arg2>, ...)]',
    start: 'Start',
    edit: 'Edit',
    copy: 'Copy',
    delete: 'Delete',
    custom_job: 'Custom Job',
    custom_script: 'Custom Script',
    sqoop_job_name: 'Job Name',
    sqoop_job_name_tips: 'Please enter Job Name(required)',
    direct: 'Direct',
    hadoop_custom_params: 'Hadoop Params',
    sqoop_advanced_parameters: 'Sqoop Advanced Parameters',
    data_source: 'Data Source',
    type: 'Type',
    datasource: 'Datasource',
    datasource_tips: 'Please select the datasource',
    model_type: 'ModelType',
    form: 'Form',
    table: 'Table',
    table_tips: 'Please enter Mysql Table(required)',
    column_type: 'ColumnType',
    all_columns: 'All Columns',
    some_columns: 'Some Columns',
    column: 'Column',
    column_tips: 'Please enter Columns (Comma separated)',
    database: 'Database',
    database_tips: 'Please enter Hive Database(required)',
    hive_table_tips: 'Please enter Hive Table(required)',
    hive_partition_keys: 'Hive partition Keys',
    hive_partition_keys_tips: 'Please enter Hive Partition Keys',
    hive_partition_values: 'Hive partition Values',
    hive_partition_values_tips: 'Please enter Hive Partition Values',
    export_dir: 'Export Dir',
    export_dir_tips: 'Please enter Export Dir(required)',
    sql_statement_tips: 'SQL Statement(required)',
    map_column_hive: 'Map Column Hive',
    map_column_java: 'Map Column Java',
    data_target: 'Data Target',
    create_hive_table: 'CreateHiveTable',
    drop_delimiter: 'DropDelimiter',
    over_write_src: 'OverWriteSrc',
    hive_target_dir: 'Hive Target Dir',
    hive_target_dir_tips: 'Please enter hive target dir',
    replace_delimiter: 'ReplaceDelimiter',
    replace_delimiter_tips: 'Please enter Replace Delimiter',
    target_dir: 'Target Dir',
    target_dir_tips: 'Please enter Target Dir(required)',
    delete_target_dir: 'DeleteTargetDir',
    compression_codec: 'CompressionCodec',
    file_type: 'FileType',
    fields_terminated: 'FieldsTerminated',
    fields_terminated_tips: 'Please enter Fields Terminated',
    lines_terminated: 'LinesTerminated',
    lines_terminated_tips: 'Please enter Lines Terminated',
    is_update: 'IsUpdate',
    update_key: 'UpdateKey',
    update_key_tips: 'Please enter Update Key',
    update_mode: 'UpdateMode',
    only_update: 'OnlyUpdate',
    allow_insert: 'AllowInsert',
    concurrency: 'Concurrency',
    concurrency_tips: 'Please enter Concurrency',
    sea_tunnel_master: 'Master',
    sea_tunnel_master_url: 'Master URL',
    sea_tunnel_queue: 'Queue',
    sea_tunnel_master_url_tips:
      'Please enter the master url, e.g., 127.0.0.1:7077',
    switch_condition: 'Condition',
    switch_branch_flow: 'Branch Flow',
    and: 'and',
    or: 'or',
    datax_custom_template: 'Custom Template Switch',
    datax_json_template: 'JSON',
    datax_target_datasource_type: 'Target Datasource Type',
    datax_target_database: 'Target Database',
    datax_target_table: 'Target Table',
    datax_target_table_tips: 'Please enter the name of the target table',
    datax_target_database_pre_sql: 'Pre SQL Statement',
    datax_target_database_post_sql: 'Post SQL Statement',
    datax_non_query_sql_tips: 'Please enter the non-query sql statement',
    datax_job_speed_byte: 'Speed(Byte count)',
    datax_job_speed_byte_info: '(0 means unlimited)',
    datax_job_speed_record: 'Speed(Record count)',
    datax_job_speed_record_info: '(0 means unlimited)',
    datax_job_runtime_memory: 'Runtime Memory Limits',
    datax_job_runtime_memory_xms: 'Low Limit Value',
    datax_job_runtime_memory_xmx: 'High Limit Value',
    datax_job_runtime_memory_unit: 'G',
    current_hour: 'CurrentHour',
    last_1_hour: 'Last1Hour',
    last_2_hour: 'Last2Hours',
    last_3_hour: 'Last3Hours',
    last_24_hour: 'Last24Hours',
    today: 'today',
    last_1_days: 'Last1Days',
    last_2_days: 'Last2Days',
    last_3_days: 'Last3Days',
    last_7_days: 'Last7Days',
    this_week: 'ThisWeek',
    last_week: 'LastWeek',
    last_monday: 'LastMonday',
    last_tuesday: 'LastTuesday',
    last_wednesday: 'LastWednesday',
    last_thursday: 'LastThursday',
    last_friday: 'LastFriday',
    last_saturday: 'LastSaturday',
    last_sunday: 'LastSunday',
    this_month: 'ThisMonth',
    last_month: 'LastMonth',
    last_month_begin: 'LastMonthBegin',
    last_month_end: 'LastMonthEnd',
    month: 'month',
    week: 'week',
    day: 'day',
    hour: 'hour',
    add_dependency: 'Add dependency',
    waiting_dependent_start: 'Waiting Dependent start',
    check_interval: 'Check interval',
    waiting_dependent_complete: 'Waiting Dependent complete',
    rule_name: 'Rule Name',
    null_check: 'NullCheck',
    custom_sql: 'CustomSql',
    multi_table_accuracy: 'MulTableAccuracy',
    multi_table_value_comparison: 'MulTableCompare',
    field_length_check: 'FieldLengthCheck',
    uniqueness_check: 'UniquenessCheck',
    regexp_check: 'RegexpCheck',
    timeliness_check: 'TimelinessCheck',
    enumeration_check: 'EnumerationCheck',
    table_count_check: 'TableCountCheck',
    src_connector_type: 'SrcConnType',
    src_datasource_id: 'SrcSource',
    src_table: 'SrcTable',
    src_filter: 'SrcFilter',
    src_field: 'SrcField',
    statistics_name: 'ActualValName',
    check_type: 'CheckType',
    operator: 'Operator',
    threshold: 'Threshold',
    failure_strategy: 'FailureStrategy',
    target_connector_type: 'TargetConnType',
    target_datasource_id: 'TargetSourceId',
    target_table: 'TargetTable',
    target_filter: 'TargetFilter',
    mapping_columns: 'OnClause',
    statistics_execute_sql: 'ActualValExecSql',
    comparison_name: 'ExceptedValName',
    comparison_execute_sql: 'ExceptedValExecSql',
    comparison_type: 'ExceptedValType',
    writer_connector_type: 'WriterConnType',
    writer_datasource_id: 'WriterSourceId',
    target_field: 'TargetField',
    field_length: 'FieldLength',
    logic_operator: 'LogicOperator',
    regexp_pattern: 'RegexpPattern',
    deadline: 'Deadline',
    datetime_format: 'DatetimeFormat',
    enum_list: 'EnumList',
    begin_time: 'BeginTime',
    fix_value: 'FixValue',
    required: 'required',
    emr_flow_define_json: 'jobFlowDefineJson',
    emr_flow_define_json_tips: 'Please enter the definition of the job flow.'
  }
}
const security = {
tenant: {
tenant_manage: 'Tenant Manage',
create_tenant: 'Create Tenant',
search_tips: 'Please enter keywords',
tenant_code: 'Operating System Tenant',
description: 'Description',
queue_name: 'QueueName',
create_time: 'Create Time',
update_time: 'Update Time',
actions: 'Operation',
edit_tenant: 'Edit Tenant',
tenant_code_tips: 'Please enter the operating system tenant',
queue_name_tips: 'Please select queue',
description_tips: 'Please enter a description',
delete_confirm: 'Delete?',
edit: 'Edit',
delete: 'Delete'
},
alarm_group: {
create_alarm_group: 'Create Alarm Group',
edit_alarm_group: 'Edit Alarm Group',
search_tips: 'Please enter keywords',
alert_group_name_tips: 'Please enter your alert group name',
alarm_plugin_instance: 'Alarm Plugin Instance',
alarm_plugin_instance_tips: 'Please select alert plugin instance',
alarm_group_description_tips: 'Please enter your alarm group description',
alert_group_name: 'Alert Group Name',
alarm_group_description: 'Alarm Group Description',
create_time: 'Create Time',
update_time: 'Update Time',
operation: 'Operation',
delete_confirm: 'Delete?',
edit: 'Edit',
delete: 'Delete'
},
worker_group: {
create_worker_group: 'Create Worker Group',
edit_worker_group: 'Edit Worker Group',
search_tips: 'Please enter keywords',
operation: 'Operation',
delete_confirm: 'Delete?',
edit: 'Edit',
delete: 'Delete',
group_name: 'Group Name',
group_name_tips: 'Please enter your group name',
worker_addresses: 'Worker Addresses',
worker_addresses_tips: 'Please select worker addresses',
create_time: 'Create Time',
update_time: 'Update Time'
},
yarn_queue: {
create_queue: 'Create Queue',
edit_queue: 'Edit Queue',
search_tips: 'Please enter keywords',
queue_name: 'Queue Name',
queue_value: 'Queue Value',
create_time: 'Create Time',
update_time: 'Update Time',
operation: 'Operation',
edit: 'Edit',
queue_name_tips: 'Please enter your queue name',
queue_value_tips: 'Please enter your queue value'
},
environment: {
create_environment: 'Create Environment',
edit_environment: 'Edit Environment',
search_tips: 'Please enter keywords',
edit: 'Edit',
delete: 'Delete',
environment_name: 'Environment Name',
environment_config: 'Environment Config',
environment_desc: 'Environment Desc',
worker_groups: 'Worker Groups',
create_time: 'Create Time',
update_time: 'Update Time',
operation: 'Operation',
delete_confirm: 'Delete?',
environment_name_tips: 'Please enter your environment name',
environment_config_tips: 'Please enter your environment config',
environment_description_tips: 'Please enter your environment description',
worker_group_tips: 'Please select worker group'
},
token: {
create_token: 'Create Token',
edit_token: 'Edit Token',
search_tips: 'Please enter keywords',
user: 'User',
user_tips: 'Please select user',
token: 'Token',
token_tips: 'Please enter your token',
expiration_time: 'Expiration Time',
expiration_time_tips: 'Please select expiration time',
create_time: 'Create Time',
update_time: 'Update Time',
operation: 'Operation',
edit: 'Edit',
delete: 'Delete',
delete_confirm: 'Delete?'
},
user: {
user_manage: 'User Manage',
create_user: 'Create User',
update_user: 'Update User',
delete_user: 'Delete User',
delete_confirm: 'Are you sure to delete?',
delete_confirm_tip:
'Deleting user is a dangerous operation,please be careful',
project: 'Project',
resource: 'Resource',
file_resource: 'File Resource',
udf_resource: 'UDF Resource',
datasource: 'Datasource',
udf: 'UDF Function',
authorize_project: 'Project Authorize',
authorize_resource: 'Resource Authorize',
authorize_datasource: 'Datasource Authorize',
authorize_udf: 'UDF Function Authorize',
username: 'Username',
username_exists: 'The username already exists',
username_tips: 'Please enter username',
user_password: 'Password',
user_password_tips:
'Please enter a password containing letters and numbers with a length between 6 and 20',
user_type: 'User Type',
ordinary_user: 'Ordinary users',
administrator: 'Administrator',
tenant_code: 'Tenant',
tenant_id_tips: 'Please select tenant',
queue: 'Queue',
queue_tips: 'Please select a queue',
email: 'Email',
email_empty_tips: 'Please enter email',
emial_correct_tips: 'Please enter the correct email format',
phone: 'Phone',
phone_empty_tips: 'Please enter phone number',
phone_correct_tips: 'Please enter the correct mobile phone format',
state: 'State',
state_enabled: 'Enabled',
state_disabled: 'Disabled',
create_time: 'Create Time',
update_time: 'Update Time',
operation: 'Operation',
edit: 'Edit',
delete: 'Delete',
authorize: 'Authorize',
save_error_msg: 'Failed to save, please retry',
delete_error_msg: 'Failed to delete, please retry',
auth_error_msg: 'Failed to authorize, please retry',
auth_success_msg: 'Authorize succeeded',
enable: 'Enable',
disable: 'Disable'
},
alarm_instance: {
search_input_tips: 'Please input the keywords',
alarm_instance_manage: 'Alarm instance manage',
alarm_instance: 'Alarm Instance',
alarm_instance_name: 'Alarm instance name',
alarm_instance_name_tips: 'Please enter alarm plugin instance name',
alarm_plugin_name: 'Alarm plugin name',
create_time: 'Create Time',
update_time: 'Update Time',
operation: 'Operation',
edit: 'Edit',
delete: 'Delete',
confirm: 'Confirm',
cancel: 'Cancel',
submit: 'Submit',
create: 'Create',
select_plugin: 'Select plugin',
select_plugin_tips: 'Select Alarm plugin',
instance_parameter_exception: 'Instance parameter exception',
WebHook: 'WebHook',
webHook: 'WebHook',
IsEnableProxy: 'Enable Proxy',
Proxy: 'Proxy',
Port: 'Port',
User: 'User',
corpId: 'CorpId',
secret: 'Secret',
Secret: 'Secret',
users: 'Users',
userSendMsg: 'UserSendMsg',
agentId: 'AgentId',
showType: 'Show Type',
receivers: 'Receivers',
receiverCcs: 'ReceiverCcs',
serverHost: 'SMTP Host',
serverPort: 'SMTP Port',
sender: 'Sender',
enableSmtpAuth: 'SMTP Auth',
Password: 'Password',
starttlsEnable: 'SMTP STARTTLS Enable',
sslEnable: 'SMTP SSL Enable',
smtpSslTrust: 'SMTP SSL Trust',
url: 'URL',
requestType: 'Request Type',
headerParams: 'Headers',
bodyParams: 'Body',
contentField: 'Content Field',
Keyword: 'Keyword',
userParams: 'User Params',
path: 'Script Path',
type: 'Type',
sendType: 'Send Type',
username: 'Username',
botToken: 'Bot Token',
chatId: 'Channel Chat Id',
parseMode: 'Parse Mode'
},
k8s_namespace: {
create_namespace: 'Create Namespace',
edit_namespace: 'Edit Namespace',
search_tips: 'Please enter keywords',
k8s_namespace: 'K8S Namespace',
k8s_namespace_tips: 'Please enter k8s namespace',
k8s_cluster: 'K8S Cluster',
k8s_cluster_tips: 'Please enter k8s cluster',
owner: 'Owner',
owner_tips: 'Please enter owner',
tag: 'Tag',
tag_tips: 'Please enter tag',
limit_cpu: 'Limit CPU',
limit_cpu_tips: 'Please enter limit CPU',
limit_memory: 'Limit Memory',
limit_memory_tips: 'Please enter limit memory',
create_time: 'Create Time',
update_time: 'Update Time',
operation: 'Operation',
edit: 'Edit',
delete: 'Delete',
delete_confirm: 'Delete?'
}
}
// Datasource-center page strings (en_US): list table headers, CRUD dialog
// labels, and per-database connection-form field labels/placeholders.
const datasource = {
  datasource: 'DataSource',
  create_datasource: 'Create DataSource',
  search_input_tips: 'Please input the keywords',
  datasource_name: 'Datasource Name',
  datasource_name_tips: 'Please enter datasource name',
  datasource_user_name: 'Owner',
  datasource_type: 'Datasource Type',
  datasource_parameter: 'Datasource Parameter',
  description: 'Description',
  description_tips: 'Please enter description',
  create_time: 'Create Time',
  update_time: 'Update Time',
  operation: 'Operation',
  click_to_view: 'Click to view',
  delete: 'Delete',
  confirm: 'Confirm',
  cancel: 'Cancel',
  create: 'Create',
  edit: 'Edit',
  success: 'Success',
  test_connect: 'Test Connect',
  ip: 'IP',
  ip_tips: 'Please enter IP',
  port: 'Port',
  port_tips: 'Please enter port',
  database_name: 'Database Name',
  database_name_tips: 'Please enter database name',
  // Oracle supports connecting either by service name or by SID.
  oracle_connect_type: 'ServiceName or SID',
  oracle_connect_type_tips: 'Please select serviceName or SID',
  oracle_service_name: 'ServiceName',
  oracle_sid: 'SID',
  jdbc_connect_parameters: 'jdbc connect parameters',
  // Kerberos-related placeholders (used by Hive/Spark sources).
  principal_tips: 'Please enter Principal',
  krb5_conf_tips:
    'Please enter the kerberos authentication parameter java.security.krb5.conf',
  keytab_username_tips:
    'Please enter the kerberos authentication parameter login.user.keytab.username',
  keytab_path_tips:
    'Please enter the kerberos authentication parameter login.user.keytab.path',
  format_tips: 'Please enter format',
  connection_parameter: 'connection parameter',
  user_name: 'User Name',
  user_name_tips: 'Please enter your username',
  user_password: 'Password',
  user_password_tips: 'Please enter your password'
}
// Data-quality module strings (en_US): task result table and rule management.
// Fix: user-facing labels misspelled 'Excepted' are corrected to 'Expected'
// (object keys are untouched, so callers are unaffected).
const data_quality = {
  task_result: {
    task_name: 'Task Name',
    workflow_instance: 'Workflow Instance',
    rule_type: 'Rule Type',
    rule_name: 'Rule Name',
    state: 'State',
    actual_value: 'Actual Value',
    excepted_value: 'Expected Value',
    check_type: 'Check Type',
    operator: 'Operator',
    threshold: 'Threshold',
    failure_strategy: 'Failure Strategy',
    excepted_value_type: 'Expected Value Type',
    error_output_path: 'Error Output Path',
    username: 'Username',
    create_time: 'Create Time',
    update_time: 'Update Time',
    undone: 'Undone',
    success: 'Success',
    failure: 'Failure',
    single_table: 'Single Table',
    single_table_custom_sql: 'Single Table Custom Sql',
    multi_table_accuracy: 'Multi Table Accuracy',
    multi_table_comparison: 'Multi Table Comparison',
    // Check-formula display strings.
    expected_and_actual_or_expected: '(Expected - Actual) / Expected x 100%',
    expected_and_actual: 'Expected - Actual',
    actual_and_expected: 'Actual - Expected',
    actual_or_expected: 'Actual / Expected x 100%'
  },
  rule: {
    actions: 'Actions',
    name: 'Rule Name',
    type: 'Rule Type',
    username: 'User Name',
    create_time: 'Create Time',
    update_time: 'Update Time',
    input_item: 'Rule input item',
    view_input_item: 'View input items',
    input_item_title: 'Input item title',
    input_item_placeholder: 'Input item placeholder',
    input_item_type: 'Input item type',
    // Abbreviated form-field labels used by the rule input-item editor.
    src_connector_type: 'SrcConnType',
    src_datasource_id: 'SrcSource',
    src_table: 'SrcTable',
    src_filter: 'SrcFilter',
    src_field: 'SrcField',
    statistics_name: 'ActualValName',
    check_type: 'CheckType',
    operator: 'Operator',
    threshold: 'Threshold',
    failure_strategy: 'FailureStrategy',
    target_connector_type: 'TargetConnType',
    target_datasource_id: 'TargetSourceId',
    target_table: 'TargetTable',
    target_filter: 'TargetFilter',
    mapping_columns: 'OnClause',
    statistics_execute_sql: 'ActualValExecSql',
    comparison_name: 'ExpectedValName',
    comparison_execute_sql: 'ExpectedValExecSql',
    comparison_type: 'ExpectedValType',
    writer_connector_type: 'WriterConnType',
    writer_datasource_id: 'WriterSourceId',
    target_field: 'TargetField',
    field_length: 'FieldLength',
    logic_operator: 'LogicOperator',
    regexp_pattern: 'RegexpPattern',
    deadline: 'Deadline',
    datetime_format: 'DatetimeFormat',
    enum_list: 'EnumList',
    begin_time: 'BeginTime',
    fix_value: 'FixValue',
    null_check: 'NullCheck',
    custom_sql: 'Custom Sql',
    single_table: 'Single Table',
    single_table_custom_sql: 'Single Table Custom Sql',
    multi_table_accuracy: 'Multi Table Accuracy',
    multi_table_value_comparison: 'Multi Table Compare',
    field_length_check: 'FieldLengthCheck',
    uniqueness_check: 'UniquenessCheck',
    regexp_check: 'RegexpCheck',
    timeliness_check: 'TimelinessCheck',
    enumeration_check: 'EnumerationCheck',
    table_count_check: 'TableCountCheck',
    // Comparison-value type options (keys mirror backend enum names).
    All: 'All',
    FixValue: 'FixValue',
    DailyAvg: 'DailyAvg',
    WeeklyAvg: 'WeeklyAvg',
    MonthlyAvg: 'MonthlyAvg',
    Last7DayAvg: 'Last7DayAvg',
    Last30DayAvg: 'Last30DayAvg',
    SrcTableTotalRows: 'SrcTableTotalRows',
    TargetTableTotalRows: 'TargetTableTotalRows'
  }
}
// Crontab-builder widget strings (en_US): time-unit names, weekday names, and
// per-unit "every / cycle / specific value" phrases.
// Fix: ungrammatical placeholders 'Please enter a days' / 'Please enter a
// months' corrected to match the sibling "Please enter a specific <unit>"
// phrasing (keys unchanged).
const crontab = {
  second: 'second',
  minute: 'minute',
  hour: 'hour',
  day: 'day',
  month: 'month',
  year: 'year',
  monday: 'Monday',
  tuesday: 'Tuesday',
  wednesday: 'Wednesday',
  thursday: 'Thursday',
  friday: 'Friday',
  saturday: 'Saturday',
  sunday: 'Sunday',
  every_second: 'Every second',
  every: 'Every',
  second_carried_out: 'second carried out',
  second_start: 'Start',
  specific_second: 'Specific second(multiple)',
  specific_second_tip: 'Please enter a specific second',
  cycle_from: 'Cycle from',
  to: 'to',
  every_minute: 'Every minute',
  minute_carried_out: 'minute carried out',
  minute_start: 'Start',
  specific_minute: 'Specific minute(multiple)',
  specific_minute_tip: 'Please enter a specific minute',
  every_hour: 'Every hour',
  hour_carried_out: 'hour carried out',
  hour_start: 'Start',
  specific_hour: 'Specific hour(multiple)',
  specific_hour_tip: 'Please enter a specific hour',
  every_day: 'Every day',
  week_carried_out: 'week carried out',
  start: 'Start',
  day_carried_out: 'day carried out',
  day_start: 'Start',
  specific_week: 'Specific day of the week(multiple)',
  specific_week_tip: 'Please enter a specific week',
  specific_day: 'Specific days(multiple)',
  specific_day_tip: 'Please enter a specific day',
  last_day_of_month: 'On the last day of the month',
  last_work_day_of_month: 'On the last working day of the month',
  last_of_month: 'At the last of this month',
  before_end_of_month: 'Before the end of this month',
  recent_business_day_to_month:
    'The most recent business day (Monday to Friday) to this month',
  in_this_months: 'In this months',
  every_month: 'Every month',
  month_carried_out: 'month carried out',
  month_start: 'Start',
  specific_month: 'Specific months(multiple)',
  specific_month_tip: 'Please enter a specific month',
  every_year: 'Every year',
  year_carried_out: 'year carried out',
  year_start: 'Start',
  specific_year: 'Specific year(multiple)',
  specific_year_tip: 'Please enter a year',
  one_hour: 'hour',
  one_day: 'day'
}
// Aggregate every locale namespace defined above into the single messages
// object consumed by the i18n setup; each key becomes a message prefix
// (e.g. 'project.workflow.start').
export default {
  login,
  modal,
  theme,
  userDropdown,
  menu,
  home,
  password,
  profile,
  monitor,
  resource,
  project,
  security,
  datasource,
  data_quality,
  crontab
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,808 | [Bug-FE][UI Next][V1.0.0-Alpha]Task instance logs are not automatically updated | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
Task instance logs are not automatically updated
<img width="1917" alt="image" src="https://user-images.githubusercontent.com/76080484/157622048-000faeae-86d3-469c-98fd-d3ae26f4aa9c.png">
### What you expected to happen
Task instance logs can be updated automatically
### How to reproduce
1. Execute a long workflow
2. View task instance logs
3. The log is not updated. You need to close the log window and open it again to update the latest log
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8808 | https://github.com/apache/dolphinscheduler/pull/8912 | 32a5ccac72682b2efc1639a46d0fd6458b37216e | bde7d52c5d8322ebc6897771cc10725aefb19e9b | "2022-03-10T08:42:36Z" | java | "2022-03-15T12:58:10Z" | dolphinscheduler-ui-next/src/locales/modules/zh_CN.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Login page strings (zh_CN): username/password labels, placeholders, button.
const login = {
  test: '测试',
  userName: '用户名',
  userName_tips: '请输入用户名',
  userPassword: '密码',
  userPassword_tips: '请输入密码',
  login: '登录'
}
// Shared modal-dialog button labels (zh_CN).
const modal = {
  cancel: '取消',
  confirm: '确定'
}
// UI theme names shown in the theme switcher (zh_CN).
const theme = {
  light: '浅色',
  dark: '深色'
}
// Items of the top-right user dropdown menu (zh_CN).
const userDropdown = {
  profile: '用户信息',
  password: '密码管理',
  logout: '退出登录'
}
// Navigation menu labels (zh_CN): top-level sections and their sub-pages.
const menu = {
  home: '首页',
  project: '项目管理',
  resources: '资源中心',
  datasource: '数据源中心',
  monitor: '监控中心',
  security: '安全中心',
  project_overview: '项目概览',
  workflow_relation: '工作流关系',
  workflow: '工作流',
  workflow_definition: '工作流定义',
  workflow_instance: '工作流实例',
  task: '任务',
  task_instance: '任务实例',
  task_definition: '任务定义',
  file_manage: '文件管理',
  udf_manage: 'UDF管理',
  resource_manage: '资源管理',
  function_manage: '函数管理',
  service_manage: '服务管理',
  master: 'Master',
  worker: 'Worker',
  db: 'DB',
  statistical_manage: '统计管理',
  statistics: 'Statistics',
  audit_log: '审计日志',
  tenant_manage: '租户管理',
  user_manage: '用户管理',
  alarm_group_manage: '告警组管理',
  alarm_instance_manage: '告警实例管理',
  worker_group_manage: 'Worker分组管理',
  yarn_queue_manage: 'Yarn队列管理',
  environment_manage: '环境管理',
  k8s_namespace_manage: 'K8S命名空间管理',
  token_manage: '令牌管理',
  task_group_manage: '任务组管理',
  task_group_option: '任务组配置',
  task_group_queue: '任务组队列',
  data_quality: '数据质量',
  task_result: '任务结果',
  rule: '规则管理'
}
// Home dashboard strings (zh_CN): chart titles and task/process state names.
const home = {
  task_state_statistics: '任务状态统计',
  process_state_statistics: '流程状态统计',
  process_definition_statistics: '流程定义统计',
  number: '数量',
  state: '状态',
  // Execution state display names (mirror the backend state enum).
  submitted_success: '提交成功',
  running_execution: '正在运行',
  ready_pause: '准备暂停',
  pause: '暂停',
  ready_stop: '准备停止',
  stop: '停止',
  failure: '失败',
  success: '成功',
  need_fault_tolerance: '需要容错',
  kill: 'KILL',
  waiting_thread: '等待线程',
  waiting_depend: '等待依赖完成',
  delay_execution: '延时执行',
  forced_success: '强制成功',
  serial_wait: '串行等待'
}
// Change-password page strings (zh_CN): labels, placeholders, validation.
const password = {
  edit_password: '修改密码',
  password: '密码',
  confirm_password: '确认密码',
  password_tips: '请输入密码',
  confirm_password_tips: '请输入确认密码',
  two_password_entries_are_inconsistent: '两次密码输入不一致',
  submit: '提交'
}
// User-profile page strings (zh_CN): info fields, edit form, validation tips.
const profile = {
  profile: '用户信息',
  edit: '编辑',
  username: '用户名',
  email: '邮箱',
  phone: '手机',
  state: '状态',
  permission: '权限',
  create_time: '创建时间',
  update_time: '更新时间',
  administrator: '管理员',
  ordinary_user: '普通用户',
  edit_profile: '编辑用户',
  username_tips: '请输入用户名',
  email_tips: '请输入邮箱',
  email_correct_tips: '请输入正确格式的邮箱',
  phone_tips: '请输入手机号',
  state_tips: '请选择状态',
  enable: '启用',
  disable: '禁用',
  timezone_success: '时区更新成功',
  please_select_timezone: '请选择时区'
}
// Monitor-center strings (zh_CN), grouped per page: master/worker server
// metrics, database health, command statistics, and the audit log table.
const monitor = {
  master: {
    cpu_usage: '处理器使用量',
    memory_usage: '内存使用量',
    load_average: '平均负载量',
    create_time: '创建时间',
    last_heartbeat_time: '最后心跳时间',
    directory_detail: '目录详情',
    host: '主机',
    directory: '注册目录'
  },
  // Same metric labels as `master`, shown on the worker page.
  worker: {
    cpu_usage: '处理器使用量',
    memory_usage: '内存使用量',
    load_average: '平均负载量',
    create_time: '创建时间',
    last_heartbeat_time: '最后心跳时间',
    directory_detail: '目录详情',
    host: '主机',
    directory: '注册目录'
  },
  db: {
    health_state: '健康状态',
    max_connections: '最大连接数',
    threads_connections: '当前连接数',
    threads_running_connections: '数据库当前活跃连接数'
  },
  statistics: {
    command_number_of_waiting_for_running: '待执行的命令数',
    failure_command_number: '执行失败的命令数',
    tasks_number_of_waiting_running: '待运行任务数',
    task_number_of_ready_to_kill: '待杀死任务数'
  },
  audit_log: {
    user_name: '用户名称',
    resource_type: '资源类型',
    project_name: '项目名称',
    operation_type: '操作类型',
    create_time: '创建时间',
    start_time: '开始时间',
    end_time: '结束时间',
    user_audit: '用户管理审计',
    project_audit: '项目管理审计',
    create: '创建',
    update: '更新',
    delete: '删除',
    read: '读取'
  }
}
const resource = {
file: {
file_manage: '文件管理',
create_folder: '创建文件夹',
create_file: '创建文件',
upload_files: '上传文件',
enter_keyword_tips: '请输入关键词',
name: '名称',
user_name: '所属用户',
whether_directory: '是否文件夹',
file_name: '文件名称',
description: '描述',
size: '大小',
update_time: '更新时间',
operation: '操作',
edit: '编辑',
rename: '重命名',
download: '下载',
delete: '删除',
yes: '是',
no: '否',
folder_name: '文件夹名称',
enter_name_tips: '请输入名称',
enter_description_tips: '请输入描述',
enter_content_tips: '请输入资源内容',
enter_suffix_tips: '请输入文件后缀',
file_format: '文件格式',
file_content: '文件内容',
delete_confirm: '确定删除吗?',
confirm: '确定',
cancel: '取消',
success: '成功',
file_details: '文件详情',
return: '返回',
save: '保存'
},
udf: {
udf_resources: 'UDF资源',
create_folder: '创建文件夹',
upload_udf_resources: '上传UDF资源',
udf_source_name: 'UDF资源名称',
whether_directory: '是否文件夹',
file_name: '文件名称',
file_size: '文件大小',
description: '描述',
create_time: '创建时间',
update_time: '更新时间',
operation: '操作',
yes: '是',
no: '否',
edit: '编辑',
download: '下载',
delete: '删除',
success: '成功',
folder_name: '文件夹名称',
upload: '上传',
upload_files: '上传文件',
file_upload: '文件上传',
delete_confirm: '确定删除吗?',
enter_keyword_tips: '请输入关键词',
enter_name_tips: '请输入名称',
enter_description_tips: '请输入描述'
},
function: {
udf_function: 'UDF函数',
create_udf_function: '创建UDF函数',
edit_udf_function: '编辑UDF函数',
udf_function_name: 'UDF函数名称',
class_name: '类名',
type: '类型',
description: '描述',
jar_package: 'jar包',
update_time: '更新时间',
operation: '操作',
rename: '重命名',
edit: '编辑',
delete: '删除',
success: '成功',
package_name: '包名类名',
udf_resources: 'UDF资源',
instructions: '使用说明',
upload_resources: '上传资源',
udf_resources_directory: 'UDF资源目录',
delete_confirm: '确定删除吗?',
enter_keyword_tips: '请输入关键词',
enter_udf_unction_name_tips: '请输入UDF函数名称',
enter_package_name_tips: '请输入包名类名',
enter_select_udf_resources_tips: '请选择UDF资源',
enter_select_udf_resources_directory_tips: '请选择UDF资源目录',
enter_instructions_tips: '请输入使用说明',
enter_name_tips: '请输入名称',
enter_description_tips: '请输入描述'
},
task_group_option: {
manage: '任务组管理',
option: '任务组配置',
create: '创建任务组',
edit: '编辑任务组',
delete: '删除任务组',
view_queue: '查看任务组队列',
switch_status: '切换任务组状态',
code: '任务组编号',
name: '任务组名称',
project_name: '项目名称',
resource_pool_size: '资源容量',
resource_used_pool_size: '已用资源',
desc: '描述信息',
status: '任务组状态',
enable_status: '启用',
disable_status: '不可用',
please_enter_name: '请输入任务组名称',
please_enter_desc: '请输入任务组描述',
please_enter_resource_pool_size: '请输入资源容量大小',
resource_pool_size_be_a_number: '资源容量大小必须大于等于1的数值',
please_select_project: '请选择项目',
create_time: '创建时间',
update_time: '更新时间',
actions: '操作',
please_enter_keywords: '请输入搜索关键词'
},
task_group_queue: {
actions: '操作',
task_name: '任务名称',
task_group_name: '任务组名称',
project_name: '项目名称',
process_name: '工作流名称',
process_instance_name: '工作流实例',
queue: '任务组队列',
priority: '组内优先级',
priority_be_a_number: '优先级必须是大于等于0的数值',
force_starting_status: '是否强制启动',
in_queue: '是否排队中',
task_status: '任务状态',
view_task_group_queue: '查看任务组队列',
the_status_of_waiting: '等待入队',
the_status_of_queuing: '排队中',
the_status_of_releasing: '已释放',
modify_priority: '修改优先级',
start_task: '强制启动',
priority_not_empty: '优先级不能为空',
priority_must_be_number: '优先级必须是数值',
please_select_task_name: '请选择节点名称',
create_time: '创建时间',
update_time: '更新时间',
edit_priority: '修改优先级'
}
}
const project = {
list: {
create_project: '创建项目',
edit_project: '编辑项目',
project_list: '项目列表',
project_tips: '请输入项目名称',
description_tips: '请输入项目描述',
username_tips: '请输入所属用户',
project_name: '项目名称',
project_description: '项目描述',
owned_users: '所属用户',
workflow_define_count: '工作流定义数',
process_instance_running_count: '正在运行的流程数',
description: '描述',
create_time: '创建时间',
update_time: '更新时间',
operation: '操作',
edit: '编辑',
delete: '删除',
confirm: '确定',
cancel: '取消',
delete_confirm: '确定删除吗?'
},
workflow: {
workflow_relation: '工作流关系',
create_workflow: '创建工作流',
import_workflow: '导入工作流',
workflow_name: '工作流名称',
current_selection: '当前选择',
online: '已上线',
offline: '已下线',
refresh: '刷新',
show_hide_label: '显示 / 隐藏标签',
workflow_offline: '工作流下线',
schedule_offline: '调度下线',
schedule_start_time: '定时开始时间',
schedule_end_time: '定时结束时间',
crontab_expression: 'Crontab',
workflow_publish_status: '工作流上线状态',
schedule_publish_status: '定时状态',
workflow_definition: '工作流定义',
workflow_instance: '工作流实例',
status: '状态',
create_time: '创建时间',
update_time: '更新时间',
description: '描述',
create_user: '创建用户',
modify_user: '修改用户',
operation: '操作',
edit: '编辑',
confirm: '确定',
cancel: '取消',
start: '运行',
timing: '定时',
timezone: '时区',
up_line: '上线',
down_line: '下线',
copy_workflow: '复制工作流',
cron_manage: '定时管理',
delete: '删除',
tree_view: '工作流树形图',
tree_limit: '限制大小',
export: '导出',
version_info: '版本信息',
version: '版本',
file_upload: '文件上传',
upload_file: '上传文件',
upload: '上传',
file_name: '文件名称',
success: '成功',
set_parameters_before_starting: '启动前请先设置参数',
set_parameters_before_timing: '定时前请先设置参数',
start_and_stop_time: '起止时间',
next_five_execution_times: '接下来五次执行时间',
execute_time: '执行时间',
failure_strategy: '失败策略',
notification_strategy: '通知策略',
workflow_priority: '流程优先级',
worker_group: 'Worker分组',
environment_name: '环境名称',
alarm_group: '告警组',
complement_data: '补数',
startup_parameter: '启动参数',
whether_dry_run: '是否空跑',
continue: '继续',
end: '结束',
none_send: '都不发',
success_send: '成功发',
failure_send: '失败发',
all_send: '成功或失败都发',
whether_complement_data: '是否是补数',
schedule_date: '调度日期',
mode_of_execution: '执行方式',
serial_execution: '串行执行',
parallel_execution: '并行执行',
parallelism: '并行度',
custom_parallelism: '自定义并行度',
please_enter_parallelism: '请输入并行度',
please_choose: '请选择',
start_time: '开始时间',
end_time: '结束时间',
crontab: 'Crontab',
delete_confirm: '确定删除吗?',
enter_name_tips: '请输入名称',
switch_version: '切换到该版本',
confirm_switch_version: '确定切换到该版本吗?',
current_version: '当前版本',
run_type: '运行类型',
scheduling_time: '调度时间',
duration: '运行时长',
run_times: '运行次数',
fault_tolerant_sign: '容错标识',
dry_run_flag: '空跑标识',
executor: '执行用户',
host: 'Host',
start_process: '启动工作流',
execute_from_the_current_node: '从当前节点开始执行',
recover_tolerance_fault_process: '恢复被容错的工作流',
resume_the_suspension_process: '恢复运行流程',
execute_from_the_failed_nodes: '从失败节点开始执行',
scheduling_execution: '调度执行',
rerun: '重跑',
stop: '停止',
pause: '暂停',
recovery_waiting_thread: '恢复等待线程',
recover_serial_wait: '串行恢复',
recovery_suspend: '恢复运行',
recovery_failed: '恢复失败',
gantt: '甘特图',
name: '名称',
all_status: '全部状态',
submit_success: '提交成功',
running: '正在运行',
ready_to_pause: '准备暂停',
ready_to_stop: '准备停止',
failed: '失败',
need_fault_tolerance: '需要容错',
kill: 'Kill',
waiting_for_thread: '等待线程',
waiting_for_dependence: '等待依赖',
waiting_for_dependency_to_complete: '等待依赖完成',
delay_execution: '延时执行',
forced_success: '强制成功',
serial_wait: '串行等待',
executing: '正在执行',
startup_type: '启动类型',
complement_range: '补数范围',
parameters_variables: '参数变量',
global_parameters: '全局参数',
local_parameters: '局部参数',
type: '类型',
retry_count: '重试次数',
submit_time: '提交时间',
refresh_status_succeeded: '刷新状态成功',
view_log: '查看日志',
update_log_success: '更新日志成功',
no_more_log: '暂无更多日志',
no_log: '暂无日志',
loading_log: '正在努力请求日志中...',
close: '关闭',
download_log: '下载日志',
refresh_log: '刷新日志',
enter_full_screen: '进入全屏',
cancel_full_screen: '取消全屏',
task_state: '任务状态',
mode_of_dependent: '依赖模式',
open: '打开'
},
task: {
task_name: '任务名称',
task_type: '任务类型',
create_task: '创建任务',
workflow_instance: '工作流实例',
workflow_name: '工作流名称',
workflow_name_tips: '请选择工作流名称',
workflow_state: '工作流状态',
version: '版本',
current_version: '当前版本',
switch_version: '切换到该版本',
confirm_switch_version: '确定切换到该版本吗?',
description: '描述',
move: '移动',
upstream_tasks: '上游任务',
executor: '执行用户',
node_type: '节点类型',
state: '状态',
submit_time: '提交时间',
start_time: '开始时间',
create_time: '创建时间',
update_time: '更新时间',
end_time: '结束时间',
duration: '运行时间',
retry_count: '重试次数',
dry_run_flag: '空跑标识',
host: '主机',
operation: '操作',
edit: '编辑',
delete: '删除',
delete_confirm: '确定删除吗?',
submitted_success: '提交成功',
running_execution: '正在运行',
ready_pause: '准备暂停',
pause: '暂停',
ready_stop: '准备停止',
stop: '停止',
failure: '失败',
success: '成功',
need_fault_tolerance: '需要容错',
kill: 'KILL',
waiting_thread: '等待线程',
waiting_depend: '等待依赖完成',
delay_execution: '延时执行',
forced_success: '强制成功',
view_log: '查看日志',
download_log: '下载日志'
},
dag: {
create: '创建工作流',
search: '搜索',
download_png: '下载工作流图片',
fullscreen_open: '全屏',
fullscreen_close: '退出全屏',
save: '保存',
close: '关闭',
format: '格式化',
refresh_dag_status: '刷新DAG状态',
layout_type: '布局类型',
grid_layout: '网格布局',
dagre_layout: '层次布局',
rows: '行数',
cols: '列数',
copy_success: '复制成功',
workflow_name: '工作流名称',
description: '描述',
tenant: '租户',
timeout_alert: '超时告警',
global_variables: '全局变量',
basic_info: '基本信息',
minute: '分',
key: '键',
value: '值',
success: '成功',
delete_cell: '删除选中的线或节点',
online_directly: '是否上线流程定义',
dag_name_empty: 'DAG图名称不能为空',
positive_integer: '请输入大于 0 的正整数',
prop_empty: '自定义参数prop不能为空',
prop_repeat: 'prop中有重复',
node_not_created: '未创建节点保存失败',
copy_name: '复制名称',
view_variables: '查看变量',
startup_parameter: '启动参数'
},
node: {
current_node_settings: '当前节点设置',
instructions: '使用说明',
view_history: '查看历史',
view_log: '查看日志',
enter_this_child_node: '进入该子节点',
name: '节点名称',
name_tips: '请输入名称(必填)',
task_type: '任务类型',
task_type_tips: '请选择任务类型(必选)',
process_name: '工作流名称',
process_name_tips: '请选择工作流(必选)',
child_node: '子节点',
enter_child_node: '进入该子节点',
run_flag: '运行标志',
normal: '正常',
prohibition_execution: '禁止执行',
description: '描述',
description_tips: '请输入描述',
task_priority: '任务优先级',
worker_group: 'Worker分组',
worker_group_tips: '该Worker分组已经不存在,请选择正确的Worker分组!',
environment_name: '环境名称',
task_group_name: '任务组名称',
task_group_queue_priority: '组内优先级',
number_of_failed_retries: '失败重试次数',
times: '次',
failed_retry_interval: '失败重试间隔',
minute: '分',
delay_execution_time: '延时执行时间',
state: '状态',
branch_flow: '分支流转',
cancel: '取消',
loading: '正在努力加载中...',
confirm: '确定',
success: '成功',
failed: '失败',
backfill_tips: '新创建子工作流还未执行,不能进入子工作流',
task_instance_tips: '该任务还未执行,不能进入子工作流',
branch_tips: '成功分支流转和失败分支流转不能选择同一个节点',
timeout_alarm: '超时告警',
timeout_strategy: '超时策略',
timeout_strategy_tips: '超时策略必须选一个',
timeout_failure: '超时失败',
timeout_period: '超时时长',
timeout_period_tips: '超时时长必须为正整数',
script: '脚本',
script_tips: '请输入脚本(必填)',
resources: '资源',
resources_tips: '请选择资源',
no_resources_tips: '请删除所有未授权或已删除资源',
useless_resources_tips: '未授权或已删除资源',
custom_parameters: '自定义参数',
copy_failed: '该浏览器不支持自动复制',
prop_tips: 'prop(必填)',
prop_repeat: 'prop中有重复',
value_tips: 'value(选填)',
value_required_tips: 'value(必填)',
pre_tasks: '前置任务',
program_type: '程序类型',
spark_version: 'Spark版本',
main_class: '主函数的Class',
main_class_tips: '请填写主函数的Class',
main_package: '主程序包',
main_package_tips: '请选择主程序包',
deploy_mode: '部署方式',
app_name: '任务名称',
app_name_tips: '请输入任务名称(选填)',
driver_cores: 'Driver核心数',
driver_cores_tips: '请输入Driver核心数',
driver_memory: 'Driver内存数',
driver_memory_tips: '请输入Driver内存数',
executor_number: 'Executor数量',
executor_number_tips: '请输入Executor数量',
executor_memory: 'Executor内存数',
executor_memory_tips: '请输入Executor内存数',
executor_cores: 'Executor核心数',
executor_cores_tips: '请输入Executor核心数',
main_arguments: '主程序参数',
main_arguments_tips: '请输入主程序参数',
option_parameters: '选项参数',
option_parameters_tips: '请输入选项参数',
positive_integer_tips: '应为正整数',
flink_version: 'Flink版本',
job_manager_memory: 'JobManager内存数',
job_manager_memory_tips: '请输入JobManager内存数',
task_manager_memory: 'TaskManager内存数',
task_manager_memory_tips: '请输入TaskManager内存数',
slot_number: 'Slot数量',
slot_number_tips: '请输入Slot数量',
parallelism: '并行度',
custom_parallelism: '自定义并行度',
parallelism_tips: '请输入并行度',
parallelism_number_tips: '并行度必须为正整数',
parallelism_complement_tips:
'如果存在大量任务需要补数时,可以利用自定义并行度将补数的任务线程设置成合理的数值,避免对服务器造成过大的影响',
task_manager_number: 'TaskManager数量',
task_manager_number_tips: '请输入TaskManager数量',
http_url: '请求地址',
http_url_tips: '请填写请求地址(必填)',
http_method: '请求类型',
http_parameters: '请求参数',
http_check_condition: '校验条件',
http_condition: '校验内容',
http_condition_tips: '请填写校验内容',
timeout_settings: '超时设置',
connect_timeout: '连接超时',
ms: '毫秒',
socket_timeout: 'Socket超时',
status_code_default: '默认响应码200',
status_code_custom: '自定义响应码',
body_contains: '内容包含',
body_not_contains: '内容不包含',
http_parameters_position: '参数位置',
target_task_name: '目标任务名',
target_task_name_tips: '请输入Pigeon任务名',
datasource_type: '数据源类型',
datasource_instances: '数据源实例',
sql_type: 'SQL类型',
sql_type_query: '查询',
sql_type_non_query: '非查询',
sql_statement: 'SQL语句',
pre_sql_statement: '前置SQL语句',
post_sql_statement: '后置SQL语句',
sql_input_placeholder: '请输入非查询SQL语句',
sql_empty_tips: '语句不能为空',
procedure_method: 'SQL语句',
procedure_method_tips: '请输入存储脚本',
procedure_method_snippet:
'--请输入存储脚本 \n\n--调用存储过程: call <procedure-name>[(<arg1>,<arg2>, ...)] \n\n--调用存储函数:?= call <procedure-name>[(<arg1>,<arg2>, ...)]',
start: '运行',
edit: '编辑',
copy: '复制节点',
delete: '删除',
custom_job: '自定义任务',
custom_script: '自定义脚本',
sqoop_job_name: '任务名称',
sqoop_job_name_tips: '请输入任务名称(必填)',
direct: '流向',
hadoop_custom_params: 'Hadoop参数',
sqoop_advanced_parameters: 'Sqoop参数',
data_source: '数据来源',
type: '类型',
datasource: '数据源',
datasource_tips: '请选择数据源',
model_type: '模式',
form: '表单',
table: '表名',
table_tips: '请输入Mysql表名(必填)',
column_type: '列类型',
all_columns: '全表导入',
some_columns: '选择列',
column: '列',
column_tips: '请输入列名,用 , 隔开',
database: '数据库',
database_tips: '请输入Hive数据库(必填)',
hive_table_tips: '请输入Hive表名(必填)',
hive_partition_keys: 'Hive 分区键',
hive_partition_keys_tips: '请输入分区键',
hive_partition_values: 'Hive 分区值',
hive_partition_values_tips: '请输入分区值',
export_dir: '数据源路径',
export_dir_tips: '请输入数据源路径(必填)',
sql_statement_tips: 'SQL语句(必填)',
map_column_hive: 'Hive类型映射',
map_column_java: 'Java类型映射',
data_target: '数据目的',
create_hive_table: '是否创建新表',
drop_delimiter: '是否删除分隔符',
over_write_src: '是否覆盖数据源',
hive_target_dir: 'Hive目标路径',
hive_target_dir_tips: '请输入Hive临时目录',
replace_delimiter: '替换分隔符',
replace_delimiter_tips: '请输入替换分隔符',
target_dir: '目标路径',
target_dir_tips: '请输入目标路径(必填)',
delete_target_dir: '是否删除目录',
compression_codec: '压缩类型',
file_type: '保存格式',
fields_terminated: '列分隔符',
fields_terminated_tips: '请输入列分隔符',
lines_terminated: '行分隔符',
lines_terminated_tips: '请输入行分隔符',
is_update: '是否更新',
update_key: '更新列',
update_key_tips: '请输入更新列',
update_mode: '更新类型',
only_update: '只更新',
allow_insert: '无更新便插入',
concurrency: '并发度',
concurrency_tips: '请输入并发度',
sea_tunnel_master: 'Master',
sea_tunnel_master_url: 'Master URL',
sea_tunnel_queue: '队列',
sea_tunnel_master_url_tips: '请直接填写地址,例如:127.0.0.1:7077',
switch_condition: '条件',
switch_branch_flow: '分支流转',
and: '且',
or: '或',
datax_custom_template: '自定义模板',
datax_json_template: 'JSON',
datax_target_datasource_type: '目标源类型',
datax_target_database: '目标源实例',
datax_target_table: '目标表',
datax_target_table_tips: '请输入目标表名',
datax_target_database_pre_sql: '目标库前置SQL',
datax_target_database_post_sql: '目标库后置SQL',
datax_non_query_sql_tips: '请输入非查询SQL语句',
datax_job_speed_byte: '限流(字节数)',
datax_job_speed_byte_info: '(KB,0代表不限制)',
datax_job_speed_record: '限流(记录数)',
datax_job_speed_record_info: '(0代表不限制)',
datax_job_runtime_memory: '运行内存',
datax_job_runtime_memory_xms: '最小内存',
datax_job_runtime_memory_xmx: '最大内存',
datax_job_runtime_memory_unit: 'G',
current_hour: '当前小时',
last_1_hour: '前1小时',
last_2_hour: '前2小时',
last_3_hour: '前3小时',
last_24_hour: '前24小时',
today: '今天',
last_1_days: '昨天',
last_2_days: '前两天',
last_3_days: '前三天',
last_7_days: '前七天',
this_week: '本周',
last_week: '上周',
last_monday: '上周一',
last_tuesday: '上周二',
last_wednesday: '上周三',
last_thursday: '上周四',
last_friday: '上周五',
last_saturday: '上周六',
last_sunday: '上周日',
this_month: '本月',
last_month: '上月',
last_month_begin: '上月初',
last_month_end: '上月末',
month: '月',
week: '周',
day: '日',
hour: '时',
add_dependency: '添加依赖',
waiting_dependent_start: '等待依赖启动',
check_interval: '检查间隔',
waiting_dependent_complete: '等待依赖完成',
rule_name: '规则名称',
null_check: '空值检测',
custom_sql: '自定义SQL',
multi_table_accuracy: '多表准确性',
multi_table_value_comparison: '两表值比对',
field_length_check: '字段长度校验',
uniqueness_check: '唯一性校验',
regexp_check: '正则表达式',
timeliness_check: '及时性校验',
enumeration_check: '枚举值校验',
table_count_check: '表行数校验',
src_connector_type: '源数据类型',
src_datasource_id: '源数据源',
src_table: '源数据表',
src_filter: '源表过滤条件',
src_field: '源表检测列',
statistics_name: '实际值名',
check_type: '校验方式',
operator: '校验操作符',
threshold: '阈值',
failure_strategy: '失败策略',
target_connector_type: '目标数据类型',
target_datasource_id: '目标数据源',
target_table: '目标数据表',
target_filter: '目标表过滤条件',
mapping_columns: 'ON语句',
statistics_execute_sql: '实际值计算SQL',
comparison_name: '期望值名',
comparison_execute_sql: '期望值计算SQL',
comparison_type: '期望值类型',
writer_connector_type: '输出数据类型',
writer_datasource_id: '输出数据源',
target_field: '目标表检测列',
field_length: '字段长度限制',
logic_operator: '逻辑操作符',
regexp_pattern: '正则表达式',
deadline: '截止时间',
datetime_format: '时间格式',
enum_list: '枚举值列表',
begin_time: '起始时间',
fix_value: '固定值',
required: '必填',
emr_flow_define_json: 'jobFlowDefineJson',
emr_flow_define_json_tips: '请输入工作流定义'
}
}
// zh-CN i18n messages for the Security module pages, grouped per page.
const security = {
  // Tenant management page
  tenant: {
    tenant_manage: '租户管理',
    create_tenant: '创建租户',
    search_tips: '请输入关键词',
    tenant_code: '操作系统租户',
    description: '描述',
    queue_name: '队列',
    create_time: '创建时间',
    update_time: '更新时间',
    actions: '操作',
    edit_tenant: '编辑租户',
    tenant_code_tips: '请输入操作系统租户',
    queue_name_tips: '请选择队列',
    description_tips: '请输入描述',
    delete_confirm: '确定删除吗?',
    edit: '编辑',
    delete: '删除'
  },
  // Alarm group management page
  alarm_group: {
    create_alarm_group: '创建告警组',
    edit_alarm_group: '编辑告警组',
    search_tips: '请输入关键词',
    alert_group_name_tips: '请输入告警组名称',
    alarm_plugin_instance: '告警组实例',
    alarm_plugin_instance_tips: '请选择告警组实例',
    alarm_group_description_tips: '请输入告警组描述',
    alert_group_name: '告警组名称',
    alarm_group_description: '告警组描述',
    create_time: '创建时间',
    update_time: '更新时间',
    operation: '操作',
    delete_confirm: '确定删除吗?',
    edit: '编辑',
    delete: '删除'
  },
  // Worker group management page
  worker_group: {
    create_worker_group: '创建Worker分组',
    edit_worker_group: '编辑Worker分组',
    search_tips: '请输入关键词',
    operation: '操作',
    delete_confirm: '确定删除吗?',
    edit: '编辑',
    delete: '删除',
    group_name: '分组名称',
    group_name_tips: '请输入分组名称',
    worker_addresses: 'Worker地址',
    worker_addresses_tips: '请选择Worker地址',
    create_time: '创建时间',
    update_time: '更新时间'
  },
  // Yarn queue management page
  yarn_queue: {
    create_queue: '创建队列',
    edit_queue: '编辑队列',
    search_tips: '请输入关键词',
    queue_name: '队列名',
    queue_value: '队列值',
    create_time: '创建时间',
    update_time: '更新时间',
    operation: '操作',
    edit: '编辑',
    queue_name_tips: '请输入队列名',
    queue_value_tips: '请输入队列值'
  },
  // Environment management page
  environment: {
    create_environment: '创建环境',
    edit_environment: '编辑环境',
    search_tips: '请输入关键词',
    edit: '编辑',
    delete: '删除',
    environment_name: '环境名称',
    environment_config: '环境配置',
    environment_desc: '环境描述',
    worker_groups: 'Worker分组',
    create_time: '创建时间',
    update_time: '更新时间',
    operation: '操作',
    delete_confirm: '确定删除吗?',
    environment_name_tips: '请输入环境名',
    environment_config_tips: '请输入环境配置',
    environment_description_tips: '请输入环境描述',
    worker_group_tips: '请选择Worker分组'
  },
  // Access token management page
  token: {
    create_token: '创建令牌',
    edit_token: '编辑令牌',
    search_tips: '请输入关键词',
    user: '用户',
    user_tips: '请选择用户',
    token: '令牌',
    token_tips: '请输入令牌',
    expiration_time: '失效时间',
    expiration_time_tips: '请选择失效时间',
    create_time: '创建时间',
    update_time: '更新时间',
    operation: '操作',
    edit: '编辑',
    delete: '删除',
    delete_confirm: '确定删除吗?'
  },
  // User management page
  user: {
    user_manage: '用户管理',
    create_user: '创建用户',
    update_user: '更新用户',
    delete_user: '删除用户',
    delete_confirm: '确定删除吗?',
    project: '项目',
    resource: '资源',
    file_resource: '文件资源',
    udf_resource: 'UDF资源',
    datasource: '数据源',
    udf: 'UDF函数',
    authorize_project: '项目授权',
    authorize_resource: '资源授权',
    authorize_datasource: '数据源授权',
    authorize_udf: 'UDF函数授权',
    username: '用户名',
    username_exists: '用户名已存在',
    username_tips: '请输入用户名',
    user_password: '密码',
    user_password_tips: '请输入包含字母和数字,长度在6~20之间的密码',
    user_type: '用户类型',
    ordinary_user: '普通用户',
    administrator: '管理员',
    tenant_code: '租户',
    tenant_id_tips: '请选择租户',
    queue: '队列',
    queue_tips: '默认为租户关联队列',
    email: '邮件',
    email_empty_tips: '请输入邮箱',
    // NOTE(review): key name 'emial_correct_tips' is misspelled but is
    // presumably referenced as-is elsewhere — do not rename it here
    // without updating every caller.
    emial_correct_tips: '请输入正确的邮箱格式',
    phone: '手机',
    phone_empty_tips: '请输入手机号码',
    phone_correct_tips: '请输入正确的手机格式',
    state: '状态',
    state_enabled: '启用',
    state_disabled: '停用',
    create_time: '创建时间',
    update_time: '更新时间',
    operation: '操作',
    edit: '编辑',
    delete: '删除',
    authorize: '授权',
    save_error_msg: '保存失败,请重试',
    delete_error_msg: '删除失败,请重试',
    auth_error_msg: '授权失败,请重试',
    auth_success_msg: '授权成功',
    enable: '启用',
    disable: '停用'
  },
  // Alert plugin instance management page (includes per-plugin form labels)
  alarm_instance: {
    search_input_tips: '请输入关键字',
    alarm_instance_manage: '告警实例管理',
    alarm_instance: '告警实例',
    alarm_instance_name: '告警实例名称',
    alarm_instance_name_tips: '请输入告警实例名称',
    alarm_plugin_name: '告警插件名称',
    create_time: '创建时间',
    update_time: '更新时间',
    operation: '操作',
    edit: '编辑',
    delete: '删除',
    confirm: '确定',
    cancel: '取消',
    submit: '提交',
    create: '创建',
    select_plugin: '选择插件',
    select_plugin_tips: '请选择告警插件',
    instance_parameter_exception: '实例参数异常',
    WebHook: 'Web钩子',
    webHook: 'Web钩子',
    IsEnableProxy: '启用代理',
    Proxy: '代理',
    Port: '端口',
    User: '用户',
    corpId: '企业ID',
    secret: '密钥',
    Secret: '密钥',
    users: '群员',
    userSendMsg: '群员信息',
    agentId: '应用ID',
    showType: '内容展示类型',
    receivers: '收件人',
    receiverCcs: '抄送人',
    serverHost: 'SMTP服务器',
    serverPort: 'SMTP端口',
    sender: '发件人',
    enableSmtpAuth: '请求认证',
    Password: '密码',
    starttlsEnable: 'STARTTLS连接',
    sslEnable: 'SSL连接',
    smtpSslTrust: 'SSL证书信任',
    url: 'URL',
    requestType: '请求方式',
    headerParams: '请求头',
    bodyParams: '请求体',
    contentField: '内容字段',
    Keyword: '关键词',
    userParams: '自定义参数',
    path: '脚本路径',
    type: '类型',
    sendType: '发送类型',
    username: '用户名',
    botToken: '机器人Token',
    chatId: '频道ID',
    parseMode: '解析类型'
  },
  // Kubernetes namespace management page
  k8s_namespace: {
    create_namespace: '创建命名空间',
    edit_namespace: '编辑命名空间',
    search_tips: '请输入关键词',
    k8s_namespace: 'K8S命名空间',
    k8s_namespace_tips: '请输入k8s命名空间',
    k8s_cluster: 'K8S集群',
    k8s_cluster_tips: '请输入k8s集群',
    owner: '负责人',
    owner_tips: '请输入负责人',
    tag: '标签',
    tag_tips: '请输入标签',
    limit_cpu: '最大CPU',
    limit_cpu_tips: '请输入最大CPU',
    limit_memory: '最大内存',
    limit_memory_tips: '请输入最大内存',
    create_time: '创建时间',
    update_time: '更新时间',
    operation: '操作',
    edit: '编辑',
    delete: '删除',
    delete_confirm: '确定删除吗?'
  }
}
// zh-CN i18n messages for the Datasource module (list page and create/edit form).
const datasource = {
  datasource: '数据源',
  create_datasource: '创建数据源',
  search_input_tips: '请输入关键字',
  datasource_name: '数据源名称',
  datasource_name_tips: '请输入数据源名称',
  datasource_user_name: '所属用户',
  datasource_type: '数据源类型',
  datasource_parameter: '数据源参数',
  description: '描述',
  description_tips: '请输入描述',
  create_time: '创建时间',
  update_time: '更新时间',
  operation: '操作',
  click_to_view: '点击查看',
  delete: '删除',
  confirm: '确定',
  cancel: '取消',
  create: '创建',
  edit: '编辑',
  success: '成功',
  test_connect: '测试连接',
  // Connection form fields
  ip: 'IP主机名',
  ip_tips: '请输入IP主机名',
  port: '端口',
  port_tips: '请输入端口',
  database_name: '数据库名',
  database_name_tips: '请输入数据库名',
  // Oracle-specific connection options
  oracle_connect_type: '服务名或SID',
  oracle_connect_type_tips: '请选择服务名或SID',
  oracle_service_name: '服务名',
  oracle_sid: 'SID',
  jdbc_connect_parameters: 'jdbc连接参数',
  // Kerberos-related hints
  principal_tips: '请输入Principal',
  krb5_conf_tips: '请输入kerberos认证参数 java.security.krb5.conf',
  keytab_username_tips: '请输入kerberos认证参数 login.user.keytab.username',
  keytab_path_tips: '请输入kerberos认证参数 login.user.keytab.path',
  format_tips: '请输入格式为',
  connection_parameter: '连接参数',
  user_name: '用户名',
  user_name_tips: '请输入用户名',
  user_password: '密码',
  user_password_tips: '请输入密码'
}
// zh-CN i18n messages for the Data Quality module (task results and rules).
const data_quality = {
  // Data-quality task result list page
  task_result: {
    task_name: '任务名称',
    workflow_instance: '工作流实例',
    rule_type: '规则类型',
    rule_name: '规则名称',
    state: '状态',
    actual_value: '实际值',
    excepted_value: '期望值',
    check_type: '检测类型',
    operator: '操作符',
    threshold: '阈值',
    failure_strategy: '失败策略',
    excepted_value_type: '期望值类型',
    error_output_path: '错误数据路径',
    username: '用户名',
    create_time: '创建时间',
    update_time: '更新时间',
    undone: '未完成',
    success: '成功',
    failure: '失败',
    single_table: '单表检测',
    single_table_custom_sql: '自定义SQL',
    multi_table_accuracy: '多表准确性',
    multi_table_comparison: '两表值对比',
    expected_and_actual_or_expected: '(期望值-实际值)/实际值 x 100%',
    expected_and_actual: '期望值-实际值',
    actual_and_expected: '实际值-期望值',
    actual_or_expected: '实际值/期望值 x 100%'
  },
  // Data-quality rule management page
  rule: {
    actions: '操作',
    name: '规则名称',
    type: '规则类型',
    username: '用户名',
    create_time: '创建时间',
    update_time: '更新时间',
    input_item: '规则输入项',
    view_input_item: '查看规则输入项信息',
    input_item_title: '输入项标题',
    input_item_placeholder: '输入项占位符',
    input_item_type: '输入项类型',
    src_connector_type: '源数据类型',
    src_datasource_id: '源数据源',
    src_table: '源数据表',
    src_filter: '源表过滤条件',
    src_field: '源表检测列',
    statistics_name: '实际值名',
    check_type: '校验方式',
    operator: '校验操作符',
    threshold: '阈值',
    failure_strategy: '失败策略',
    target_connector_type: '目标数据类型',
    target_datasource_id: '目标数据源',
    target_table: '目标数据表',
    target_filter: '目标表过滤条件',
    mapping_columns: 'ON语句',
    statistics_execute_sql: '实际值计算SQL',
    comparison_name: '期望值名',
    comparison_execute_sql: '期望值计算SQL',
    comparison_type: '期望值类型',
    writer_connector_type: '输出数据类型',
    writer_datasource_id: '输出数据源',
    target_field: '目标表检测列',
    field_length: '字段长度限制',
    logic_operator: '逻辑操作符',
    regexp_pattern: '正则表达式',
    deadline: '截止时间',
    datetime_format: '时间格式',
    enum_list: '枚举值列表',
    begin_time: '起始时间',
    fix_value: '固定值',
    null_check: '空值检测',
    custom_sql: '自定义SQL',
    single_table: '单表检测',
    multi_table_accuracy: '多表准确性',
    multi_table_value_comparison: '两表值比对',
    field_length_check: '字段长度校验',
    uniqueness_check: '唯一性校验',
    regexp_check: '正则表达式',
    timeliness_check: '及时性校验',
    enumeration_check: '枚举值校验',
    table_count_check: '表行数校验',
    all: '全部',
    FixValue: '固定值',
    DailyAvg: '日均值',
    WeeklyAvg: '周均值',
    MonthlyAvg: '月均值',
    Last7DayAvg: '最近7天均值',
    Last30DayAvg: '最近30天均值',
    SrcTableTotalRows: '源表总行数',
    TargetTableTotalRows: '目标表总行数'
  }
}
// zh-CN i18n messages for the crontab (schedule expression) picker widget.
const crontab = {
  second: '秒',
  minute: '分',
  hour: '时',
  day: '天',
  month: '月',
  year: '年',
  monday: '星期一',
  tuesday: '星期二',
  wednesday: '星期三',
  thursday: '星期四',
  friday: '星期五',
  saturday: '星期六',
  sunday: '星期天',
  // Second tab
  every_second: '每一秒钟',
  every: '每隔',
  second_carried_out: '秒执行 从',
  second_start: '秒开始',
  specific_second: '具体秒数(可多选)',
  specific_second_tip: '请选择具体秒数',
  cycle_from: '周期从',
  to: '到',
  // Minute tab
  every_minute: '每一分钟',
  minute_carried_out: '分执行 从',
  minute_start: '分开始',
  specific_minute: '具体分钟数(可多选)',
  specific_minute_tip: '请选择具体分钟数',
  // Hour tab
  every_hour: '每一小时',
  hour_carried_out: '小时执行 从',
  hour_start: '小时开始',
  specific_hour: '具体小时数(可多选)',
  specific_hour_tip: '请选择具体小时数',
  // Day / week tab
  every_day: '每一天',
  week_carried_out: '周执行 从',
  start: '开始',
  day_carried_out: '天执行 从',
  day_start: '天开始',
  specific_week: '具体星期几(可多选)',
  specific_week_tip: '请选择具体周几',
  specific_day: '具体天数(可多选)',
  specific_day_tip: '请选择具体天数',
  last_day_of_month: '在这个月的最后一天',
  last_work_day_of_month: '在这个月的最后一个工作日',
  last_of_month: '在这个月的最后一个',
  before_end_of_month: '在本月底前',
  recent_business_day_to_month: '最近的工作日(周一至周五)至本月',
  in_this_months: '在这个月的第',
  // Month tab
  every_month: '每一月',
  month_carried_out: '月执行 从',
  month_start: '月开始',
  specific_month: '具体月数(可多选)',
  specific_month_tip: '请选择具体月数',
  // Year tab
  every_year: '每一年',
  year_carried_out: '年执行 从',
  year_start: '年开始',
  specific_year: '具体年数(可多选)',
  specific_year_tip: '请选择具体年数',
  one_hour: '小时',
  one_day: '日'
}
// Aggregate every zh-CN message group defined in this module into the single
// locale object consumed by the i18n setup.
export default {
  login,
  modal,
  theme,
  userDropdown,
  menu,
  home,
  password,
  profile,
  monitor,
  resource,
  project,
  security,
  datasource,
  data_quality,
  crontab
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,808 | [Bug-FE][UI Next][V1.0.0-Alpha]Task instance logs are not automatically updated | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
Task instance logs are not automatically updated
<img width="1917" alt="image" src="https://user-images.githubusercontent.com/76080484/157622048-000faeae-86d3-469c-98fd-d3ae26f4aa9c.png">
### What you expected to happen
Task instance logs can be updated automatically
### How to reproduce
1. Execute a long workflow
2. View task instance logs
3. The log is not updated. You need to close the log window and open it again to update the latest log
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8808 | https://github.com/apache/dolphinscheduler/pull/8912 | 32a5ccac72682b2efc1639a46d0fd6458b37216e | bde7d52c5d8322ebc6897771cc10725aefb19e9b | "2022-03-10T08:42:36Z" | java | "2022-03-15T12:58:10Z" | dolphinscheduler-ui-next/src/views/projects/task/instance/components/log-modal.tsx | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { defineComponent, PropType, toRefs, watch } from 'vue'
import { useI18n } from 'vue-i18n'
import { NLog } from 'naive-ui'
import { useModal } from './use-modal'
import Modal from '@/components/modal'
// Props accepted by LogModal: `showModalRef` toggles the modal's visibility
// and `row` carries the task-instance record whose log is displayed.
const props = {
  showModalRef: {
    type: Boolean as PropType<boolean>,
    default: false
  },
  row: {
    type: Object as PropType<any>,
    // Object/array prop defaults must be factory functions in Vue 3 so every
    // component instance receives its own object (a bare `{}` is shared and
    // triggers a runtime warning).
    default: () => ({})
  }
}
const LogModal = defineComponent({
  name: 'LogModal',
  props,
  emits: ['confirmModal'],
  setup(props, ctx) {
    const { t } = useI18n()
    const { variables, getLogs } = useModal()

    // Forward the confirm action so the parent can close the modal.
    const confirmModal = () => {
      ctx.emit('confirmModal', props.showModalRef)
    }

    // On open: record the task-instance id and fetch its log.
    // On close: reset log text and paging state for the next open.
    watch(
      () => props.showModalRef,
      (visible) => {
        if (visible) {
          variables.id = props.row.id
          if (variables.id) {
            getLogs()
          }
        } else {
          variables.id = ''
          variables.logRef = ''
          variables.loadingRef = true
          variables.skipLineNum = 0
          variables.limit = 1000
        }
      }
    )

    return { t, ...toRefs(variables), confirmModal }
  },
  render() {
    return (
      <Modal
        title={this.t('project.task.view_log')}
        show={this.showModalRef}
        cancelShow={false}
        onConfirm={this.confirmModal}
        style={{ width: '60%' }}
      >
        <NLog rows={30} log={this.logRef} loading={this.loadingRef} />
      </Modal>
    )
  }
})

export default LogModal
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,592 | [Feature][python] Add CI auto remove unused package | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
We need a tool to automatically remove unused packages instead of removing them by hand
this issue started when issue #8591 is done
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8592 | https://github.com/apache/dolphinscheduler/pull/8897 | eefe7896c81b74ce56d80e354aaa3f8e1e02e3b3 | e53ac4e304c3b0d80a710d27d1ebc66d60a43822 | "2022-03-01T03:44:39Z" | java | "2022-03-16T01:33:26Z" | dolphinscheduler-python/pydolphinscheduler/.pre-commit-config.yaml | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
default_stages: [commit, push]
default_language_version:
# force all python hooks to run python3
python: python3
repos:
- repo: https://github.com/pycqa/isort
rev: 5.10.1
hooks:
- id: isort
name: isort (python)
- repo: https://github.com/psf/black
rev: 22.1.0
hooks:
- id: black
- repo: https://github.com/pycqa/flake8
rev: 4.0.1
hooks:
- id: flake8
additional_dependencies: [
'flake8-docstrings>=1.6',
'flake8-black>=0.2',
]
# pre-commit run in the root, so we have to point out the full path of configuration
args: [
--config,
dolphinscheduler-python/pydolphinscheduler/.flake8
]
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,592 | [Feature][python] Add CI auto remove unused package | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
We need a package to auto remove unused package instead of remove by hand
this issue started when issue #8591 is done
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8592 | https://github.com/apache/dolphinscheduler/pull/8897 | eefe7896c81b74ce56d80e354aaa3f8e1e02e3b3 | e53ac4e304c3b0d80a710d27d1ebc66d60a43822 | "2022-03-01T03:44:39Z" | java | "2022-03-16T01:33:26Z" | dolphinscheduler-python/pydolphinscheduler/DEVELOP.md | <!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
# Develop
pydolphinscheduler is python API for Apache DolphinScheduler, it just defines what workflow look like instead of
store or execute it. We here use [py4j][py4j] to dynamically access Java Virtual Machine.
## Setup Develop Environment
**PyDolphinScheduler** uses GitHub to hold all source code; you should clone the code before you make any changes.
```shell
git clone git@github.com:apache/dolphinscheduler.git
```
Now, we should install all dependence to make sure we could run test or check code style locally
```shell
cd dolphinscheduler/dolphinscheduler-python/pydolphinscheduler
pip install .[dev]
```
Next, we have to open pydolphinscheduler project in you editor. We recommend you use [pycharm][pycharm]
instead of [IntelliJ IDEA][idea] to open it. And you could just open directory
`dolphinscheduler-python/pydolphinscheduler` instead of `dolphinscheduler-python`.
## Brief Concept
Apache DolphinScheduler is design to define workflow by UI, and pydolphinscheduler try to define it by code. When
define by code, user usually do not care user, tenant, or queue exists or not. All user care about is created
a new workflow by the code his/her definition. So we have some **side object** in `pydolphinscheduler/side`
directory, their only check object exists or not, and create them if not exists.
### Process Definition
pydolphinscheduler workflow object name, process definition is also same name as Java object(maybe would be change to
other word for more simple).
### Tasks
pydolphinscheduler tasks object, we use tasks to define exact job we want DolphinScheduler do for us. For now,
we only support `shell` task to execute shell task. [This link][all-task] list all tasks support in DolphinScheduler
and would be implemented in the further.
## Test Your Code
Linting and tests are very important for an open source project, so we pay close attention to them. We have continuous
integration service run by GitHub Action to test whether the patch is good or not, which you could jump to
section [With GitHub Action](#with-github-action) see more detail.
And to make more convenience to local tests, we also have the way to run your [test automated with tox](#automated-testing-with-tox)
locally. It is helpful when your try to find out the detail when continuous integration in GitHub Action failed,
or you have a great patch and want to test local first.
Besides [automated testing with tox](#automated-testing-with-tox) locally, we also have a [manual way](#manually)
run tests. And it is scattered commands to reproduce each step of the integration test we told about.
* Remote
* [With GitHub Action](#with-github-action)
* Local
* [Automated Testing With tox](#automated-testing-with-tox)
* [Manually](#manually)
### With GitHub Action
GitHub Action test in various environment for pydolphinscheduler, including different python version in
`3.6|3.7|3.8|3.9` and operating system `linux|macOS|windows`. It will trigger and run automatically when you
submit pull requests to `apache/dolphinscheduler`.
### Automated Testing With tox
[tox](https://tox.wiki) is a package aims to automate and standardize testing in Python, both our continuous
integration and local test use it to run actual task. To use it, you should install it first
```shell
python -m pip install --upgrade tox
```
After installation, you could run a single command to run all the tests, it is almost like test in GitHub Action
but not so much different environment.
```shell
tox -e local-ci
```
It will take a while when you run it the first time, because it has to install dependencies and make some prepare,
and the next time you run it will be faster.
### Manually
#### Code Style
We use [isort][isort] to automatically keep Python imports alphabetically, and use [Black][black] for code
formatter and [Flake8][flake8] for pep8 checker. If you use [pycharm][pycharm]or [IntelliJ IDEA][idea],
maybe you could follow [Black-integration][black-editor] to configure them in your environment.
Our Python API CI would automatically run code style checker and unittest when you submit pull request in
GitHub, you could also run static check locally.
We recommend [pre-commit](https://pre-commit.com/) to do the checker mentioned above before you develop locally.
You should install `pre-commit` by running
```shell
python -m pip install pre-commit
```
in your development environment and then run `pre-commit install` to set up the git hooks scripts. After finish
above steps, each time you run `git commit` or `git push` would run pre-commit check to make basic check before
you create pull requests in GitHub.
```shell
# We recommend you run isort and Black before Flake8, because Black could auto fix some code style issue
# but Flake8 just hint when code style not match pep8
# Run Isort
python -m isort .
# Run Black
python -m black .
# Run Flake8
python -m flake8
```
#### Testing
pydolphinscheduler using [pytest][pytest] to run all tests in directory `tests`. You could run tests by the commands
```shell
python -m pytest --cov=pydolphinscheduler --cov-config=.coveragerc tests/
```
Besides run tests, it will also check the unit test [coverage][coverage] threshold, for now when test cover less than 90%
will fail the coverage, as well as our GitHub Action.
The command above will check test coverage automatically, and you could also test the coverage by command.
```shell
python -m coverage run && python -m coverage report
```
It would not only run unit test but also show each file coverage which cover rate less than 100%, and `TOTAL`
line show you total coverage of you code. If your CI failed with coverage you could go and find some reason by
this command output.
## Add LICENSE When New Dependencies Adding
When you add a new package in pydolphinscheduler, you should also add the package's LICENSE to directory
`dolphinscheduler-dist/release-docs/licenses/python-api-licenses`, and also add a short description to
`dolphinscheduler-dist/release-docs/LICENSE`.
## Update `UPDATING.md` when public class, method or interface is be changed
When you change public class, method or interface, you should change the [UPDATING.md](./UPDATING.md) to notice
users who may use it in other way.
<!-- content -->
[py4j]: https://www.py4j.org/index.html
[pycharm]: https://www.jetbrains.com/pycharm
[idea]: https://www.jetbrains.com/idea/
[all-task]: https://dolphinscheduler.apache.org/en-us/docs/dev/user_doc/guide/task/shell.html
[pytest]: https://docs.pytest.org/en/latest/
[black]: https://black.readthedocs.io/en/stable/index.html
[flake8]: https://flake8.pycqa.org/en/latest/index.html
[black-editor]: https://black.readthedocs.io/en/stable/integrations/editors.html#pycharm-intellij-idea
[coverage]: https://coverage.readthedocs.io/en/stable/
[isort]: https://pycqa.github.io/isort/index.html
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,592 | [Feature][python] Add CI auto remove unused package | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
We need a package to auto remove unused package instead of remove by hand
this issue started when issue #8591 is done
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8592 | https://github.com/apache/dolphinscheduler/pull/8897 | eefe7896c81b74ce56d80e354aaa3f8e1e02e3b3 | e53ac4e304c3b0d80a710d27d1ebc66d60a43822 | "2022-03-01T03:44:39Z" | java | "2022-03-16T01:33:26Z" | dolphinscheduler-python/pydolphinscheduler/setup.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""The script for setting up pydolphinscheduler."""
import logging
import os
import shutil
import sys
from distutils.dir_util import remove_tree
from os.path import dirname, join
from typing import List

from setuptools import Command, find_packages, setup
# pydolphinscheduler only targets Python 3; fail fast if installed on Python 2.
if sys.version_info[0] < 3:
    raise Exception(
        "pydolphinscheduler does not support Python 2. Please upgrade to Python 3."
    )

logger = logging.getLogger(__name__)

# Package version of this release.
version = "2.0.4"

# Start package required
# Runtime dependencies required to use pydolphinscheduler.
prod = [
    "click>=8.0.0",
    "py4j~=0.10",
    "ruamel.yaml",
]

# Dependencies needed to build distributions (sdist / wheel).
build = [
    "build",
    "setuptools>=42",
    "wheel",
]

# Dependencies needed to build the Sphinx documentation.
doc = [
    "sphinx>=4.3",
    "sphinx_rtd_theme>=1.0",
    "sphinx-click>=3.0",
]

# Dependencies needed to run the test suite with coverage.
test = [
    "pytest>=6.2",
    "freezegun>=1.1",
    "coverage>=6.1",
    "pytest-cov>=3.0",
]

# Dependencies needed for code-style checking (flake8 / black / isort).
style = [
    "flake8>=4.0",
    "flake8-docstrings>=1.6",
    "flake8-black>=0.2",
    "isort>=5.10",
]

# Aggregated extras: 'dev' for contributors, 'all' for everything incl. runtime.
dev = style + test + doc + build

all_dep = prod + dev
# End package required
def read(*names, **kwargs):
    """Read and return the text content of a file.

    :param names: path segments joined onto this script's directory
        (``os.path.join`` semantics, so an absolute segment overrides
        the preceding ones).
    :param kwargs: optional ``encoding`` keyword, default ``utf8``.
    :return: the file's content as a string.
    """
    # Use a context manager so the file handle is closed deterministically
    # (the previous implementation relied on garbage collection to close it).
    with open(
        join(dirname(__file__), *names), encoding=kwargs.get("encoding", "utf8")
    ) as fp:
        return fp.read()
class CleanCommand(Command):
    """Command to clean up python api before setup by running `python setup.py pre_clean`."""

    description = "Clean up project root"
    user_options: List[str] = []
    # Build/test artifacts (directories and files) removed by :meth:`run`;
    # paths are relative to the current working directory.
    clean_list = [
        "build",
        "htmlcov",
        "dist",
        ".pytest_cache",
        ".coverage",
    ]

    def initialize_options(self) -> None:
        """Set default values for options."""
        pass

    def finalize_options(self) -> None:
        """Set final values for options."""
        pass

    def run(self) -> None:
        """Remove each path in :attr:`clean_list`, skipping missing ones."""
        for cl in self.clean_list:
            if not os.path.exists(cl):
                logger.info("Path %s do not exists.", cl)
            elif os.path.isdir(cl):
                # shutil.rmtree replaces distutils.dir_util.remove_tree:
                # distutils is deprecated (PEP 632) and removed in Python 3.12.
                shutil.rmtree(cl)
            else:
                os.remove(cl)
        logger.info("Finish pre_clean process.")
# Declare all package metadata to setuptools; this drives `pip install`,
# sdist/wheel builds, optional extras, and the custom `pre_clean` command.
setup(
    name="apache-dolphinscheduler",
    version=version,
    license="Apache License 2.0",
    description="Apache DolphinScheduler Python API",
    long_description=read("README.md"),
    # Make sure pypi is expecting markdown
    long_description_content_type="text/markdown",
    author="Apache Software Foundation",
    author_email="dev@dolphinscheduler.apache.org",
    url="https://dolphinscheduler.apache.org/",
    python_requires=">=3.6",
    keywords=[
        "dolphinscheduler",
        "workflow",
        "scheduler",
        "taskflow",
    ],
    project_urls={
        "Homepage": "https://dolphinscheduler.apache.org",
        "Documentation": "https://dolphinscheduler.apache.org/python/index.html",
        "Source": "https://github.com/apache/dolphinscheduler/tree/dev/dolphinscheduler-python/"
        "pydolphinscheduler",
        "Issue Tracker": "https://github.com/apache/dolphinscheduler/issues?"
        "q=is%3Aissue+is%3Aopen+label%3APython",
        "Discussion": "https://github.com/apache/dolphinscheduler/discussions",
        "Twitter": "https://twitter.com/dolphinschedule",
    },
    # Source lives under src/ ("src layout").
    packages=find_packages(where="src"),
    package_dir={"": "src"},
    include_package_data=True,
    # Ship the default configuration template with the package.
    package_data={
        "pydolphinscheduler": ["core/default_config.yaml"],
    },
    platforms=["any"],
    classifiers=[
        # complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers
        "Development Status :: 3 - Alpha",
        "Environment :: Console",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: Unix",
        "Operating System :: POSIX",
        "Operating System :: Microsoft :: Windows",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Topic :: Software Development :: User Interfaces",
    ],
    install_requires=prod,
    # Optional dependency groups defined near the top of this file.
    extras_require={
        "all": all_dep,
        "dev": dev,
        "style": style,
        "test": test,
        "doc": doc,
        "build": build,
    },
    cmdclass={
        "pre_clean": CleanCommand,
    },
    # Console entry point for the `pydolphinscheduler` CLI.
    entry_points={
        "console_scripts": [
            "pydolphinscheduler = pydolphinscheduler.cli.commands:cli",
        ],
    },
)
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,592 | [Feature][python] Add CI auto remove unused package | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
We need a package to auto remove unused package instead of remove by hand
this issue started when issue #8591 is done
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8592 | https://github.com/apache/dolphinscheduler/pull/8897 | eefe7896c81b74ce56d80e354aaa3f8e1e02e3b3 | e53ac4e304c3b0d80a710d27d1ebc66d60a43822 | "2022-03-01T03:44:39Z" | java | "2022-03-16T01:33:26Z" | dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/exceptions.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Exceptions for pydolphinscheduler."""
class PyDSBaseException(Exception):
    """Base exception for pydolphinscheduler; all custom errors derive from it."""

    pass


class PyDSParamException(PyDSBaseException):
    """Exception for pydolphinscheduler parameter verify error."""

    pass


class PyDSTaskNoFoundException(PyDSBaseException):
    """Exception for pydolphinscheduler workflow task no found error."""

    pass


class PyDSJavaGatewayException(PyDSBaseException):
    """Exception for pydolphinscheduler Java gateway error."""

    pass


class PyDSProcessDefinitionNotAssignException(PyDSBaseException):
    """Exception for pydolphinscheduler process definition not assign error."""

    # ``pass`` added for consistency with the sibling exception classes.
    pass


class PyDSConfException(PyDSBaseException):
    """Exception for pydolphinscheduler configuration error."""

    pass
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,592 | [Feature][python] Add CI auto remove unused package | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar feature requirement.
### Description
We need a tool to automatically remove unused packages instead of removing them by hand.
This issue should be started once issue #8591 is done.
### Use case
_No response_
### Related issues
_No response_
### Are you willing to submit a PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8592 | https://github.com/apache/dolphinscheduler/pull/8897 | eefe7896c81b74ce56d80e354aaa3f8e1e02e3b3 | e53ac4e304c3b0d80a710d27d1ebc66d60a43822 | "2022-03-01T03:44:39Z" | java | "2022-03-16T01:33:26Z" | dolphinscheduler-python/pydolphinscheduler/tox.ini | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
[tox]
# Environments run by a bare `tox` invocation, in order.
envlist = local-ci, lint, doc-build-test, code-test, py{36,37,38,39}

[testenv]
# `make` is an external command used by the doc-build environments below;
# whitelist it so tox does not warn about it.
whitelist_externals = make

[testenv:lint]
# Static style checks only (isort / black / flake8); no tests run here.
extras = style
commands =
    python -m isort --check .
    python -m black --check .
    python -m flake8

[testenv:code-test]
extras = test
# Run both tests and coverage
commands =
    python -m pytest --cov=pydolphinscheduler --cov-config={toxinidir}/.coveragerc tests/

[testenv:doc-build-test]
# Build the Sphinx docs from a clean state so stale artifacts cannot hide errors.
extras = doc
commands =
    make -C {toxinidir}/docs clean
    make -C {toxinidir}/docs html

[testenv:local-ci]
# Aggregate environment: runs lint, code tests and the doc build in sequence.
extras = dev
commands =
    {[testenv:lint]commands}
    {[testenv:code-test]commands}
    {[testenv:doc-build-test]commands}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 8,913 | [Bug] [Worker] roll view log error | ### Search before asking
- [X] I had searched in the [issues](https://github.com/apache/dolphinscheduler/issues?q=is%3Aissue) and found no similar issues.
### What happened
```
java.lang.NullPointerException: null
at java.lang.String.startsWith(String.java:1405)
at java.lang.String.startsWith(String.java:1434)
at org.apache.dolphinscheduler.server.log.LoggerRequestProcessor.checkPathSecurity(LoggerRequestProcessor.java:149)
at org.apache.dolphinscheduler.server.log.LoggerRequestProcessor.process(LoggerRequestProcessor.java:103)
at org.apache.dolphinscheduler.remote.handler.NettyServerHandler.lambda$processReceived$0(NettyServerHandler.java:129)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
[ERROR] 2022-03-15 12:46:54.403 org.apache.dolphinscheduler.service.log.LogClientService:[101] - roll view log error
```
### What you expected to happen
Roll view log should work normally.
### How to reproduce
View the online log of a task
### Anything else
_No response_
### Version
dev
### Are you willing to submit PR?
- [X] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct)
| https://github.com/apache/dolphinscheduler/issues/8913 | https://github.com/apache/dolphinscheduler/pull/8914 | e53ac4e304c3b0d80a710d27d1ebc66d60a43822 | 5ad73f8352ae74d2f8804d75e00eb2712c6be879 | "2022-03-15T11:00:07Z" | java | "2022-03-16T02:09:29Z" | dolphinscheduler-log-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.log;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.LoggerUtils;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.command.log.GetLogBytesRequestCommand;
import org.apache.dolphinscheduler.remote.command.log.GetLogBytesResponseCommand;
import org.apache.dolphinscheduler.remote.command.log.RemoveTaskLogRequestCommand;
import org.apache.dolphinscheduler.remote.command.log.RemoveTaskLogResponseCommand;
import org.apache.dolphinscheduler.remote.command.log.RollViewLogRequestCommand;
import org.apache.dolphinscheduler.remote.command.log.RollViewLogResponseCommand;
import org.apache.dolphinscheduler.remote.command.log.ViewLogRequestCommand;
import org.apache.dolphinscheduler.remote.command.log.ViewLogResponseCommand;
import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor;
import org.apache.dolphinscheduler.remote.utils.Constants;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import io.netty.channel.Channel;
/**
 * Processor for task-log requests received over the netty remote channel.
 *
 * <p>Handles four command types: fetching whole log bytes, viewing a whole
 * log, rolling (paged) view of a log, and removing a task log file. Every
 * request path is validated by {@link #checkPathSecurity(String)} before any
 * file system access.
 */
@Component
public class LoggerRequestProcessor implements NettyRequestProcessor {

    private final Logger logger = LoggerFactory.getLogger(LoggerRequestProcessor.class);

    // Shared pool used by callers to dispatch log requests off the netty IO threads.
    private final ExecutorService executor;

    public LoggerRequestProcessor() {
        this.executor = Executors.newFixedThreadPool(Constants.CPUS * 2 + 1);
    }

    /**
     * Dispatch an incoming log command to the matching handler and write the
     * response back on the same channel.
     *
     * @param channel netty channel the request arrived on
     * @param command request command; its type selects the handler
     * @throws IllegalArgumentException if the requested path fails the
     *         security check or the command type is unknown
     */
    @Override
    public void process(Channel channel, Command command) {
        logger.info("received command : {}", command);

        // request task log command type
        final CommandType commandType = command.getType();
        switch (commandType) {
            case GET_LOG_BYTES_REQUEST:
                GetLogBytesRequestCommand getLogRequest = JSONUtils.parseObject(
                        command.getBody(), GetLogBytesRequestCommand.class);
                String path = getLogRequest.getPath();
                if (!checkPathSecurity(path)) {
                    throw new IllegalArgumentException("Illegal path");
                }
                byte[] bytes = getFileContentBytes(path);
                GetLogBytesResponseCommand getLogResponse = new GetLogBytesResponseCommand(bytes);
                channel.writeAndFlush(getLogResponse.convert2Command(command.getOpaque()));
                break;
            case VIEW_WHOLE_LOG_REQUEST:
                ViewLogRequestCommand viewLogRequest = JSONUtils.parseObject(
                        command.getBody(), ViewLogRequestCommand.class);
                String viewLogPath = viewLogRequest.getPath();
                if (!checkPathSecurity(viewLogPath)) {
                    throw new IllegalArgumentException("Illegal path");
                }
                String msg = LoggerUtils.readWholeFileContent(viewLogPath);
                ViewLogResponseCommand viewLogResponse = new ViewLogResponseCommand(msg);
                channel.writeAndFlush(viewLogResponse.convert2Command(command.getOpaque()));
                break;
            case ROLL_VIEW_LOG_REQUEST:
                RollViewLogRequestCommand rollViewLogRequest = JSONUtils.parseObject(
                        command.getBody(), RollViewLogRequestCommand.class);
                String rollViewLogPath = rollViewLogRequest.getPath();
                if (!checkPathSecurity(rollViewLogPath)) {
                    throw new IllegalArgumentException("Illegal path");
                }
                List<String> lines = readPartFileContent(rollViewLogPath,
                        rollViewLogRequest.getSkipLineNum(), rollViewLogRequest.getLimit());
                StringBuilder builder = new StringBuilder();
                for (String line : lines) {
                    builder.append(line).append("\r\n");
                }
                RollViewLogResponseCommand rollViewLogRequestResponse = new RollViewLogResponseCommand(builder.toString());
                channel.writeAndFlush(rollViewLogRequestResponse.convert2Command(command.getOpaque()));
                break;
            case REMOVE_TAK_LOG_REQUEST:
                RemoveTaskLogRequestCommand removeTaskLogRequest = JSONUtils.parseObject(
                        command.getBody(), RemoveTaskLogRequestCommand.class);
                String taskLogPath = removeTaskLogRequest.getPath();
                if (!checkPathSecurity(taskLogPath)) {
                    throw new IllegalArgumentException("Illegal path");
                }
                File taskLogFile = new File(taskLogPath);
                boolean status = true;
                try {
                    if (taskLogFile.exists()) {
                        status = taskLogFile.delete();
                    }
                } catch (Exception e) {
                    status = false;
                }
                RemoveTaskLogResponseCommand removeTaskLogResponse = new RemoveTaskLogResponseCommand(status);
                channel.writeAndFlush(removeTaskLogResponse.convert2Command(command.getOpaque()));
                break;
            default:
                throw new IllegalArgumentException("unknown commandType");
        }
    }

    /**
     * LogServer only can read the logs dir.
     *
     * <p>Falls back to the current working directory ({@code user.dir}) when
     * the {@code DOLPHINSCHEDULER_WORKER_HOME} system property is not set, so
     * a missing property can no longer cause a NullPointerException inside
     * {@link String#startsWith(String)}. A null or empty path is rejected for
     * the same reason.
     *
     * @param path requested log file path (may be null)
     * @return true only for a {@code .log} file under the worker home that
     *         contains no parent-directory traversal
     */
    private boolean checkPathSecurity(String path) {
        String dsHome = System.getProperty("DOLPHINSCHEDULER_WORKER_HOME");
        if (dsHome == null || dsHome.isEmpty()) {
            // Property not configured: use the process working directory instead.
            dsHome = System.getProperty("user.dir");
        }
        if (path == null || path.isEmpty()) {
            logger.warn("path is null or empty, reject the log request");
            return false;
        }
        return path.startsWith(dsHome) && !path.contains("../") && path.endsWith(".log");
    }

    public ExecutorService getExecutor() {
        return this.executor;
    }

    /**
     * get files content bytes for download file
     *
     * @param filePath file path
     * @return byte array of file, or an empty array on any IO error
     */
    private byte[] getFileContentBytes(String filePath) {
        try (InputStream in = new FileInputStream(filePath);
             ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
            byte[] buf = new byte[1024];
            int len;
            while ((len = in.read(buf)) != -1) {
                bos.write(buf, 0, len);
            }
            return bos.toByteArray();
        } catch (IOException e) {
            logger.error("get file bytes error", e);
        }
        return new byte[0];
    }

    /**
     * read part file content,can skip any line and read some lines
     *
     * @param filePath file path
     * @param skipLine skip line
     * @param limit read lines limit
     * @return part file content, or an empty list if the file is missing or unreadable
     */
    private List<String> readPartFileContent(String filePath,
                                             int skipLine,
                                             int limit) {
        File file = new File(filePath);
        if (file.exists() && file.isFile()) {
            try (Stream<String> stream = Files.lines(Paths.get(filePath))) {
                return stream.skip(skipLine).limit(limit).collect(Collectors.toList());
            } catch (IOException e) {
                logger.error("read file error", e);
            }
        } else {
            logger.info("file path: {} not exists", filePath);
        }
        return Collections.emptyList();
    }
}
|